mirror of
https://github.com/ksyasuda/SubMiner.git
synced 2026-04-11 16:19:27 -07:00
Compare commits
1 Commits
94abd0f372
...
v0.4.0
| Author | SHA1 | Date | |
|---|---|---|---|
| e18985fb14 |
3
.github/workflows/ci.yml
vendored
3
.github/workflows/ci.yml
vendored
@@ -31,7 +31,8 @@ jobs:
|
|||||||
path: |
|
path: |
|
||||||
~/.bun/install/cache
|
~/.bun/install/cache
|
||||||
node_modules
|
node_modules
|
||||||
key: ${{ runner.os }}-bun-${{ hashFiles('bun.lock') }}
|
vendor/subminer-yomitan/node_modules
|
||||||
|
key: ${{ runner.os }}-bun-${{ hashFiles('bun.lock', 'vendor/subminer-yomitan/package-lock.json') }}
|
||||||
restore-keys: |
|
restore-keys: |
|
||||||
${{ runner.os }}-bun-
|
${{ runner.os }}-bun-
|
||||||
|
|
||||||
|
|||||||
25
.github/workflows/release.yml
vendored
25
.github/workflows/release.yml
vendored
@@ -31,22 +31,23 @@ jobs:
|
|||||||
with:
|
with:
|
||||||
node-version: 22.12.0
|
node-version: 22.12.0
|
||||||
|
|
||||||
- name: Install dependencies
|
|
||||||
run: bun install --frozen-lockfile
|
|
||||||
|
|
||||||
- name: Cache dependencies
|
- name: Cache dependencies
|
||||||
uses: actions/cache@v4
|
uses: actions/cache@v4
|
||||||
with:
|
with:
|
||||||
path: |
|
path: |
|
||||||
~/.bun/install/cache
|
~/.bun/install/cache
|
||||||
node_modules
|
node_modules
|
||||||
key: ${{ runner.os }}-bun-${{ hashFiles('bun.lock') }}
|
vendor/subminer-yomitan/node_modules
|
||||||
|
key: ${{ runner.os }}-bun-${{ hashFiles('bun.lock', 'vendor/subminer-yomitan/package-lock.json') }}
|
||||||
restore-keys: |
|
restore-keys: |
|
||||||
${{ runner.os }}-bun-
|
${{ runner.os }}-bun-
|
||||||
|
|
||||||
- name: Install dependencies
|
- name: Install dependencies
|
||||||
run: bun install --frozen-lockfile
|
run: bun install --frozen-lockfile
|
||||||
|
|
||||||
|
- name: Build (TypeScript check)
|
||||||
|
run: bun run typecheck
|
||||||
|
|
||||||
- name: Test suite (source)
|
- name: Test suite (source)
|
||||||
run: bun run test:fast
|
run: bun run test:fast
|
||||||
|
|
||||||
@@ -84,6 +85,11 @@ jobs:
|
|||||||
with:
|
with:
|
||||||
bun-version: 1.3.5
|
bun-version: 1.3.5
|
||||||
|
|
||||||
|
- name: Setup Node
|
||||||
|
uses: actions/setup-node@v4
|
||||||
|
with:
|
||||||
|
node-version: 22.12.0
|
||||||
|
|
||||||
- name: Cache dependencies
|
- name: Cache dependencies
|
||||||
uses: actions/cache@v4
|
uses: actions/cache@v4
|
||||||
with:
|
with:
|
||||||
@@ -91,7 +97,8 @@ jobs:
|
|||||||
~/.bun/install/cache
|
~/.bun/install/cache
|
||||||
node_modules
|
node_modules
|
||||||
vendor/texthooker-ui/node_modules
|
vendor/texthooker-ui/node_modules
|
||||||
key: ${{ runner.os }}-bun-${{ hashFiles('bun.lock', 'vendor/texthooker-ui/package.json') }}
|
vendor/subminer-yomitan/node_modules
|
||||||
|
key: ${{ runner.os }}-bun-${{ hashFiles('bun.lock', 'vendor/texthooker-ui/package.json', 'vendor/subminer-yomitan/package-lock.json') }}
|
||||||
restore-keys: |
|
restore-keys: |
|
||||||
${{ runner.os }}-bun-
|
${{ runner.os }}-bun-
|
||||||
|
|
||||||
@@ -140,6 +147,11 @@ jobs:
|
|||||||
with:
|
with:
|
||||||
bun-version: 1.3.5
|
bun-version: 1.3.5
|
||||||
|
|
||||||
|
- name: Setup Node
|
||||||
|
uses: actions/setup-node@v4
|
||||||
|
with:
|
||||||
|
node-version: 22.12.0
|
||||||
|
|
||||||
- name: Cache dependencies
|
- name: Cache dependencies
|
||||||
uses: actions/cache@v4
|
uses: actions/cache@v4
|
||||||
with:
|
with:
|
||||||
@@ -147,7 +159,8 @@ jobs:
|
|||||||
~/.bun/install/cache
|
~/.bun/install/cache
|
||||||
node_modules
|
node_modules
|
||||||
vendor/texthooker-ui/node_modules
|
vendor/texthooker-ui/node_modules
|
||||||
key: ${{ runner.os }}-bun-${{ hashFiles('bun.lock', 'vendor/texthooker-ui/package.json') }}
|
vendor/subminer-yomitan/node_modules
|
||||||
|
key: ${{ runner.os }}-bun-${{ hashFiles('bun.lock', 'vendor/texthooker-ui/package.json', 'vendor/subminer-yomitan/package-lock.json') }}
|
||||||
restore-keys: |
|
restore-keys: |
|
||||||
${{ runner.os }}-bun-
|
${{ runner.os }}-bun-
|
||||||
|
|
||||||
|
|||||||
2
.gitignore
vendored
2
.gitignore
vendored
@@ -5,6 +5,7 @@ node_modules/
|
|||||||
out/
|
out/
|
||||||
dist/
|
dist/
|
||||||
release/
|
release/
|
||||||
|
build/yomitan/
|
||||||
|
|
||||||
# Launcher build artifact (produced by make build-launcher)
|
# Launcher build artifact (produced by make build-launcher)
|
||||||
/subminer
|
/subminer
|
||||||
@@ -36,3 +37,4 @@ tests/*
|
|||||||
.worktrees/
|
.worktrees/
|
||||||
.codex/*
|
.codex/*
|
||||||
.agents/*
|
.agents/*
|
||||||
|
docs/*
|
||||||
|
|||||||
6
.gitmodules
vendored
6
.gitmodules
vendored
@@ -5,6 +5,6 @@
|
|||||||
[submodule "vendor/yomitan-jlpt-vocab"]
|
[submodule "vendor/yomitan-jlpt-vocab"]
|
||||||
path = vendor/yomitan-jlpt-vocab
|
path = vendor/yomitan-jlpt-vocab
|
||||||
url = https://github.com/stephenmk/yomitan-jlpt-vocab
|
url = https://github.com/stephenmk/yomitan-jlpt-vocab
|
||||||
[submodule "yomitan-jlpt-vocab"]
|
[submodule "vendor/subminer-yomitan"]
|
||||||
path = vendor/yomitan-jlpt-vocab
|
path = vendor/subminer-yomitan
|
||||||
url = https://github.com/stephenmk/yomitan-jlpt-vocab
|
url = https://github.com/ksyasuda/subminer-yomitan
|
||||||
|
|||||||
2
Makefile
2
Makefile
@@ -98,7 +98,7 @@ ensure-bun:
|
|||||||
@command -v bun >/dev/null 2>&1 || { printf '%s\n' "[ERROR] bun not found"; exit 1; }
|
@command -v bun >/dev/null 2>&1 || { printf '%s\n' "[ERROR] bun not found"; exit 1; }
|
||||||
|
|
||||||
pretty: ensure-bun
|
pretty: ensure-bun
|
||||||
@bun run format
|
@bun run format:src
|
||||||
|
|
||||||
build:
|
build:
|
||||||
@printf '%s\n' "[INFO] Detected platform: $(PLATFORM)"
|
@printf '%s\n' "[INFO] Detected platform: $(PLATFORM)"
|
||||||
|
|||||||
37
README.md
37
README.md
@@ -33,6 +33,7 @@ SubMiner is an Electron overlay that sits on top of mpv. It turns your video pla
|
|||||||
- **Subtitle tools** — Download from Jimaku, sync with alass/ffsubsync
|
- **Subtitle tools** — Download from Jimaku, sync with alass/ffsubsync
|
||||||
- **Immersion tracking** — SQLite-powered stats on your watch time and mining activity
|
- **Immersion tracking** — SQLite-powered stats on your watch time and mining activity
|
||||||
- **Custom texthooker page** — Built-in custom texthooker page and websocket, no extra setup
|
- **Custom texthooker page** — Built-in custom texthooker page and websocket, no extra setup
|
||||||
|
- **Annotated websocket API** — Dedicated annotation feed can serve bundled texthooker or external clients with rendered `sentence` HTML plus structured `tokens`
|
||||||
- **Jellyfin integration** — Remote playback setup, cast device mode, and direct playback launch
|
- **Jellyfin integration** — Remote playback setup, cast device mode, and direct playback launch
|
||||||
- **AniList progress** — Track episode completion and push watching progress automatically
|
- **AniList progress** — Track episode completion and push watching progress automatically
|
||||||
|
|
||||||
@@ -53,30 +54,36 @@ chmod +x ~/.local/bin/subminer
|
|||||||
> [!NOTE]
|
> [!NOTE]
|
||||||
> The `subminer` wrapper uses a [Bun](https://bun.sh) shebang. Make sure `bun` is on your `PATH`.
|
> The `subminer` wrapper uses a [Bun](https://bun.sh) shebang. Make sure `bun` is on your `PATH`.
|
||||||
|
|
||||||
**From source** or **macOS** — see the [installation guide](https://docs.subminer.moe/installation#from-source).
|
**From source** or **macOS** — initialize submodules first (`git submodule update --init --recursive`). Source builds now also require Node.js 22 + npm because bundled Yomitan is built from the `vendor/subminer-yomitan` submodule into `build/yomitan` during `bun run build`. Full install guide: [docs.subminer.moe/installation#from-source](https://docs.subminer.moe/installation#from-source).
|
||||||
|
|
||||||
### 2. Install the mpv plugin and configuration file
|
### 2. Launch the app once
|
||||||
|
|
||||||
```bash
|
```bash
|
||||||
wget https://github.com/ksyasuda/SubMiner/releases/latest/download/subminer-assets.tar.gz -O /tmp/subminer-assets.tar.gz
|
SubMiner.AppImage
|
||||||
tar -xzf /tmp/subminer-assets.tar.gz -C /tmp
|
|
||||||
mkdir -p ~/.config/mpv/scripts/subminer
|
|
||||||
mkdir -p ~/.config/mpv/script-opts
|
|
||||||
cp -R /tmp/plugin/subminer/. ~/.config/mpv/scripts/subminer/
|
|
||||||
cp /tmp/plugin/subminer.conf ~/.config/mpv/script-opts/
|
|
||||||
mkdir -p ~/.config/SubMiner && cp /tmp/config.example.jsonc ~/.config/SubMiner/config.jsonc
|
|
||||||
```
|
```
|
||||||
|
|
||||||
### 3. Set up Yomitan Dictionaries
|
On first launch, SubMiner now:
|
||||||
|
|
||||||
```bash
|
- starts in the tray/background
|
||||||
subminer app --yomitan
|
- creates the default config directory and `config.jsonc`
|
||||||
```
|
- opens a compact setup popup
|
||||||
|
- can install the mpv plugin to the default mpv scripts location for you
|
||||||
|
- links directly to Yomitan settings so you can install dictionaries before finishing setup
|
||||||
|
|
||||||
|
Existing installs that already have a valid config plus at least one Yomitan dictionary are auto-detected as complete and will not be re-prompted.
|
||||||
|
|
||||||
|
### 3. Finish setup
|
||||||
|
|
||||||
|
- click `Install mpv plugin` if you want the default plugin auto-start flow
|
||||||
|
- click `Open Yomitan Settings` and install at least one dictionary
|
||||||
|
- click `Refresh status`
|
||||||
|
- click `Finish setup`
|
||||||
|
|
||||||
|
The mpv plugin step is optional. Yomitan must report at least one installed dictionary before setup can be completed.
|
||||||
|
|
||||||
### 4. Mine
|
### 4. Mine
|
||||||
|
|
||||||
```bash
|
```bash
|
||||||
subminer app --start --background
|
|
||||||
subminer video.mkv # default plugin config auto-starts visible overlay + resumes playback when ready
|
subminer video.mkv # default plugin config auto-starts visible overlay + resumes playback when ready
|
||||||
subminer --start video.mkv # optional explicit overlay start when plugin auto_start=no
|
subminer --start video.mkv # optional explicit overlay start when plugin auto_start=no
|
||||||
```
|
```
|
||||||
@@ -85,7 +92,7 @@ subminer --start video.mkv # optional explicit overlay start when plugin auto_st
|
|||||||
|
|
||||||
| Required | Optional |
|
| Required | Optional |
|
||||||
| ------------------------------------------ | -------------------------------------------------- |
|
| ------------------------------------------ | -------------------------------------------------- |
|
||||||
| `bun` | |
|
| `bun`, `node` 22, `npm` | |
|
||||||
| `mpv` with IPC socket | `yt-dlp` |
|
| `mpv` with IPC socket | `yt-dlp` |
|
||||||
| `ffmpeg` | `guessit` (better AniSkip title/episode detection) |
|
| `ffmpeg` | `guessit` (better AniSkip title/episode detection) |
|
||||||
| `mecab` + `mecab-ipadic` | `fzf` / `rofi` |
|
| `mecab` + `mecab-ipadic` | `fzf` / `rofi` |
|
||||||
|
|||||||
@@ -0,0 +1,49 @@
|
|||||||
|
---
|
||||||
|
id: TASK-100
|
||||||
|
title: 'Add configurable texthooker startup launch'
|
||||||
|
status: Done
|
||||||
|
assignee: []
|
||||||
|
created_date: '2026-03-06 23:30'
|
||||||
|
updated_date: '2026-03-07 01:59'
|
||||||
|
labels: []
|
||||||
|
dependencies: []
|
||||||
|
priority: medium
|
||||||
|
ordinal: 10000
|
||||||
|
---
|
||||||
|
|
||||||
|
## Description
|
||||||
|
|
||||||
|
<!-- SECTION:DESCRIPTION:BEGIN -->
|
||||||
|
|
||||||
|
Add a config option under `texthooker` to launch the built-in texthooker server automatically when SubMiner starts.
|
||||||
|
|
||||||
|
Scope:
|
||||||
|
|
||||||
|
- Add `texthooker.launchAtStartup`.
|
||||||
|
- Default to `true`.
|
||||||
|
- Start the existing texthooker server during normal app startup when enabled.
|
||||||
|
- Keep `texthooker.openBrowser` as separate behavior.
|
||||||
|
- Add regression coverage and update generated config docs/example.
|
||||||
|
|
||||||
|
<!-- SECTION:DESCRIPTION:END -->
|
||||||
|
|
||||||
|
## Acceptance Criteria
|
||||||
|
|
||||||
|
<!-- AC:BEGIN -->
|
||||||
|
|
||||||
|
- [x] #1 Default config enables automatic texthooker startup.
|
||||||
|
- [x] #2 Config parser accepts valid boolean values and warns on invalid values.
|
||||||
|
- [x] #3 App-ready startup launches texthooker when enabled.
|
||||||
|
- [x] #4 Generated config template/example documents the new option.
|
||||||
|
|
||||||
|
<!-- AC:END -->
|
||||||
|
|
||||||
|
## Final Summary
|
||||||
|
|
||||||
|
<!-- SECTION:FINAL_SUMMARY:BEGIN -->
|
||||||
|
|
||||||
|
Added `texthooker.launchAtStartup` with a default of `true`, wired it through config defaults/validation/template generation, and started the existing texthooker server during app-ready startup without coupling it to browser auto-open behavior.
|
||||||
|
|
||||||
|
Also added regression coverage for config parsing/template output and app-ready dependency wiring, then regenerated the checked-in config example artifacts.
|
||||||
|
|
||||||
|
<!-- SECTION:FINAL_SUMMARY:END -->
|
||||||
@@ -0,0 +1,39 @@
|
|||||||
|
---
|
||||||
|
id: TASK-101
|
||||||
|
title: Index AniList character alternative names in the character dictionary
|
||||||
|
status: Done
|
||||||
|
assignee: []
|
||||||
|
created_date: '2026-03-07 00:00'
|
||||||
|
updated_date: '2026-03-08 00:11'
|
||||||
|
labels:
|
||||||
|
- dictionary
|
||||||
|
- anilist
|
||||||
|
dependencies: []
|
||||||
|
references:
|
||||||
|
- src/main/character-dictionary-runtime.ts
|
||||||
|
- src/main/character-dictionary-runtime.test.ts
|
||||||
|
priority: high
|
||||||
|
---
|
||||||
|
|
||||||
|
## Description
|
||||||
|
|
||||||
|
<!-- SECTION:DESCRIPTION:BEGIN -->
|
||||||
|
Index AniList character alternative names in generated character dictionaries so aliases like Shadow resolve during subtitle lookup instead of falling through to unrelated generic dictionary entries.
|
||||||
|
<!-- SECTION:DESCRIPTION:END -->
|
||||||
|
|
||||||
|
## Acceptance Criteria
|
||||||
|
<!-- AC:BEGIN -->
|
||||||
|
- [x] #1 Character fetch reads AniList alternative character names needed for lookup coverage
|
||||||
|
- [x] #2 Generated term banks include alias-derived terms for subtitle lookups like シャドウ
|
||||||
|
- [x] #3 Regression coverage proves alternative-name indexing works end to end
|
||||||
|
<!-- AC:END -->
|
||||||
|
|
||||||
|
## Final Summary
|
||||||
|
|
||||||
|
<!-- SECTION:FINAL_SUMMARY:BEGIN -->
|
||||||
|
Character dictionary generation now requests AniList `name.alternative`, indexes those aliases as term candidates, and expands mixed aliases like `Minoru Kagenou (影野ミノル)` into usable outer/inner variants. Also extended kana alias synthesis so the AniList alias `Shadow` emits `シャドウ`, which matches the subtitle token the user hit in The Eminence in Shadow.
|
||||||
|
|
||||||
|
Bumped the character-dictionary snapshot format to invalidate stale cached snapshots, and updated merged-dictionary rebuilds to refresh invalid snapshots before composing the ZIP so old cache files do not hard-fail the merge path.
|
||||||
|
|
||||||
|
Verified with `bun test src/main/character-dictionary-runtime.test.ts` and `bun run tsc --noEmit`.
|
||||||
|
<!-- SECTION:FINAL_SUMMARY:END -->
|
||||||
@@ -0,0 +1,65 @@
|
|||||||
|
---
|
||||||
|
id: TASK-102
|
||||||
|
title: Quiet default AppImage startup and implicit background launch
|
||||||
|
status: Done
|
||||||
|
assignee:
|
||||||
|
- codex
|
||||||
|
created_date: '2026-03-06 21:20'
|
||||||
|
updated_date: '2026-03-06 21:33'
|
||||||
|
labels: []
|
||||||
|
dependencies: []
|
||||||
|
references:
|
||||||
|
- /home/sudacode/projects/japanese/SubMiner/src/main-entry-runtime.ts
|
||||||
|
- /home/sudacode/projects/japanese/SubMiner/src/core/services/cli-command.ts
|
||||||
|
- /home/sudacode/projects/japanese/SubMiner/src/main.ts
|
||||||
|
priority: medium
|
||||||
|
---
|
||||||
|
|
||||||
|
## Description
|
||||||
|
|
||||||
|
<!-- SECTION:DESCRIPTION:BEGIN -->
|
||||||
|
Make the packaged Linux no-arg launch path behave like a quiet background start instead of surfacing startup-only noise.
|
||||||
|
|
||||||
|
Scope:
|
||||||
|
|
||||||
|
- Treat default background entry launches as implicit `--start --background`.
|
||||||
|
- Keep the `--password-store` diagnostic out of normal startup output.
|
||||||
|
- Suppress known startup-only `node:sqlite` and `lsfg-vk` warnings for the entry/background launch path.
|
||||||
|
- Avoid noisy protocol-registration warnings during normal startup when registration is unsupported.
|
||||||
|
<!-- SECTION:DESCRIPTION:END -->
|
||||||
|
|
||||||
|
## Acceptance Criteria
|
||||||
|
|
||||||
|
<!-- AC:BEGIN -->
|
||||||
|
- [x] #1 Initial background launch reaches the start path without logging `No running instance. Use --start to launch the app.`
|
||||||
|
- [x] #2 Default startup no longer emits the `Applied --password-store gnome-libsecret` line at normal log levels.
|
||||||
|
- [x] #3 Entry/background launch sanitization suppresses the observed `ExperimentalWarning: SQLite...` and `lsfg-vk ... unsupported configuration version` startup noise.
|
||||||
|
- [x] #4 Regression coverage documents the new startup behavior.
|
||||||
|
<!-- AC:END -->
|
||||||
|
|
||||||
|
## Implementation Notes
|
||||||
|
|
||||||
|
<!-- SECTION:NOTES:BEGIN -->
|
||||||
|
Normalized no-arg/password-store-only entry launches to append implicit `--start --background`, and upgraded `--background`-only entry launches to include `--start`.
|
||||||
|
|
||||||
|
Applied shared entry env sanitization before loading the main process so default startup strips the `lsfg-vk` Vulkan layer and sets `NODE_NO_WARNINGS=1`; background children keep the same sanitized env.
|
||||||
|
|
||||||
|
Downgraded startup-only protocol-registration failure logging to debug, and routed the Linux password-store diagnostic through the scoped debug logger instead of raw console output.
|
||||||
|
|
||||||
|
Verification:
|
||||||
|
|
||||||
|
- `bun test src/main-entry-runtime.test.ts src/main/runtime/anilist-setup-protocol.test.ts src/main/runtime/anilist-setup-protocol-main-deps.test.ts`
|
||||||
|
- `bun run test:fast`
|
||||||
|
|
||||||
|
Note: the final `node --experimental-sqlite --test dist/main/runtime/registry.test.js` step in `bun run test:fast` still prints Node's own experimental SQLite warning because that test command explicitly enables the feature flag outside the app entrypoint.
|
||||||
|
<!-- SECTION:NOTES:END -->
|
||||||
|
|
||||||
|
## Final Summary
|
||||||
|
|
||||||
|
<!-- SECTION:FINAL_SUMMARY:BEGIN -->
|
||||||
|
Default packaged startup is now quiet and behaves like an implicit `--start --background` launch.
|
||||||
|
|
||||||
|
- No-arg AppImage entry launches now append `--start --background`, and `--background`-only launches append the missing `--start`.
|
||||||
|
- Entry/background startup sanitization now suppresses the observed `lsfg-vk` and `node:sqlite` warnings on the app launch path.
|
||||||
|
- Linux password-store and unsupported protocol-registration diagnostics now stay at debug level instead of normal startup output.
|
||||||
|
<!-- SECTION:FINAL_SUMMARY:END -->
|
||||||
@@ -0,0 +1,38 @@
|
|||||||
|
---
|
||||||
|
id: TASK-103
|
||||||
|
title: Add dedicated annotation websocket for texthooker
|
||||||
|
status: Done
|
||||||
|
assignee:
|
||||||
|
- codex
|
||||||
|
created_date: '2026-03-07 02:20'
|
||||||
|
updated_date: '2026-03-07 02:20'
|
||||||
|
labels:
|
||||||
|
- texthooker
|
||||||
|
- websocket
|
||||||
|
- subtitle
|
||||||
|
dependencies: []
|
||||||
|
priority: medium
|
||||||
|
---
|
||||||
|
|
||||||
|
## Description
|
||||||
|
|
||||||
|
<!-- SECTION:DESCRIPTION:BEGIN -->
|
||||||
|
Add a separate annotated subtitle websocket for bundled texthooker so token/JLPT/frequency markup is available on a stable dedicated port even when the regular websocket is in `auto` mode and skipped because `mpv_websocket` is installed.
|
||||||
|
<!-- SECTION:DESCRIPTION:END -->
|
||||||
|
|
||||||
|
## Acceptance Criteria
|
||||||
|
|
||||||
|
<!-- AC:BEGIN -->
|
||||||
|
- [x] #1 Regular `websocket.enabled: "auto"` behavior remains unchanged and still skips the regular websocket when `mpv_websocket` is installed.
|
||||||
|
- [x] #2 A separate `annotationWebsocket` config controls an independent annotated websocket with default port `6678`.
|
||||||
|
- [x] #3 Bundled texthooker is pointed at the annotation websocket when it is enabled.
|
||||||
|
- [x] #4 Focused regression tests cover config parsing, startup wiring, and texthooker bootstrap injection.
|
||||||
|
<!-- AC:END -->
|
||||||
|
|
||||||
|
## Final Summary
|
||||||
|
|
||||||
|
<!-- SECTION:FINAL_SUMMARY:BEGIN -->
|
||||||
|
Added `annotationWebsocket.enabled`/`annotationWebsocket.port` with defaults of `true`/`6678`, started that websocket independently from the regular auto-managed websocket, and injected the bundled texthooker websocket URL so it connects to the annotation feed by default.
|
||||||
|
|
||||||
|
Also added focused regression coverage and regenerated the checked-in config examples.
|
||||||
|
<!-- SECTION:FINAL_SUMMARY:END -->
|
||||||
@@ -0,0 +1,43 @@
|
|||||||
|
---
|
||||||
|
id: TASK-104
|
||||||
|
title: Mirror overlay annotation hover behavior in vendored texthooker
|
||||||
|
status: Done
|
||||||
|
assignee:
|
||||||
|
- codex
|
||||||
|
created_date: '2026-03-06 21:45'
|
||||||
|
updated_date: '2026-03-06 21:45'
|
||||||
|
labels:
|
||||||
|
- texthooker
|
||||||
|
- subtitle
|
||||||
|
- websocket
|
||||||
|
dependencies:
|
||||||
|
- TASK-103
|
||||||
|
references:
|
||||||
|
- /home/sudacode/projects/japanese/SubMiner/src/core/services/subtitle-ws.ts
|
||||||
|
- /home/sudacode/projects/japanese/SubMiner/vendor/texthooker-ui/src/components/App.svelte
|
||||||
|
- /home/sudacode/projects/japanese/SubMiner/vendor/texthooker-ui/src/line-markup.ts
|
||||||
|
- /home/sudacode/projects/japanese/SubMiner/vendor/texthooker-ui/src/app.css
|
||||||
|
priority: medium
|
||||||
|
---
|
||||||
|
|
||||||
|
## Description
|
||||||
|
|
||||||
|
<!-- SECTION:DESCRIPTION:BEGIN -->
|
||||||
|
Bring bundled texthooker annotation rendering closer to the visible overlay. Keep the lightweight texthooker UX, but preserve token metadata for hover, match overlay color-precedence rules across known/N+1/name/frequency/JLPT, expose name-match highlighting as a toggle, and emit a structured annotation payload on the dedicated websocket so non-SubMiner clients can treat it as an API.
|
||||||
|
<!-- SECTION:DESCRIPTION:END -->
|
||||||
|
|
||||||
|
## Acceptance Criteria
|
||||||
|
|
||||||
|
<!-- AC:BEGIN -->
|
||||||
|
- [x] #1 Annotation websocket payload includes both rendered `sentence` HTML and structured token metadata for generic clients.
|
||||||
|
- [x] #2 Vendored texthooker preserves annotation metadata attrs needed for hover labels and uses overlay-matching color precedence rules.
|
||||||
|
- [x] #3 Vendored texthooker supports character-name highlighting with a user-facing toggle and standalone-web note.
|
||||||
|
- [x] #4 Hovering annotated texthooker tokens reveals JLPT/frequency metadata without adding the full overlay popup workflow.
|
||||||
|
- [x] #5 Focused serializer, texthooker markup, socket parsing, CSS, and build verification pass.
|
||||||
|
<!-- AC:END -->
|
||||||
|
|
||||||
|
## Final Summary
|
||||||
|
|
||||||
|
<!-- SECTION:FINAL_SUMMARY:BEGIN -->
|
||||||
|
Extended the dedicated annotation websocket payload to ship `version`, plain `text`, rendered `sentence`, and structured `tokens` metadata while keeping backward-compatible `sentence` consumers working. Updated the vendored texthooker to preserve hover metadata attrs, follow overlay color precedence for known/N+1/name/frequency/JLPT annotations, add a character-name highlight toggle plus standalone-web dictionary note, and render lightweight hover labels for frequency/JLPT metadata. Added focused regression coverage and rebuilt both the vendored texthooker bundle and SubMiner.
|
||||||
|
<!-- SECTION:FINAL_SUMMARY:END -->
|
||||||
@@ -0,0 +1,47 @@
|
|||||||
|
---
|
||||||
|
id: TASK-105
|
||||||
|
title: Stop local docs artifact writes after docs repo split
|
||||||
|
status: Done
|
||||||
|
assignee: []
|
||||||
|
created_date: '2026-03-07 00:00'
|
||||||
|
updated_date: '2026-03-07 00:20'
|
||||||
|
labels: []
|
||||||
|
dependencies: []
|
||||||
|
priority: medium
|
||||||
|
ordinal: 10500
|
||||||
|
---
|
||||||
|
|
||||||
|
## Description
|
||||||
|
|
||||||
|
<!-- SECTION:DESCRIPTION:BEGIN -->
|
||||||
|
|
||||||
|
Now that user-facing docs live in `../subminer-docs`, first-party scripts in this repo should not keep writing generated artifacts into the local `docs/` tree.
|
||||||
|
|
||||||
|
Scope:
|
||||||
|
|
||||||
|
- Audit first-party scripts/automation for writes to `docs/`.
|
||||||
|
- Keep repo-local outputs only where they are still intentionally owned by this repo.
|
||||||
|
- Repoint generated docs artifacts to `../subminer-docs` when that is the maintained source of truth.
|
||||||
|
- Add regression coverage for the config-example generation path contract.
|
||||||
|
|
||||||
|
<!-- SECTION:DESCRIPTION:END -->
|
||||||
|
|
||||||
|
## Acceptance Criteria
|
||||||
|
|
||||||
|
<!-- AC:BEGIN -->
|
||||||
|
|
||||||
|
- [x] #1 The config-example generator no longer writes to `docs/public/config.example.jsonc` inside this repo.
|
||||||
|
- [x] #2 When `../subminer-docs` exists, the generator updates `../subminer-docs/public/config.example.jsonc`.
|
||||||
|
- [x] #3 Automated coverage guards the output-path contract so local docs writes do not regress.
|
||||||
|
|
||||||
|
<!-- AC:END -->
|
||||||
|
|
||||||
|
## Final Summary
|
||||||
|
|
||||||
|
<!-- SECTION:FINAL_SUMMARY:BEGIN -->
|
||||||
|
|
||||||
|
Removed the first-party local `docs/public` config-example write path from `src/generate-config-example.ts` and replaced it with sibling-docs-repo detection that targets `../subminer-docs/public/config.example.jsonc` only when that repo exists.
|
||||||
|
|
||||||
|
Added a project-local regression suite for output-path resolution and artifact writing, wired that suite into the maintained config test lane, and removed the stale generated `docs/public/config.example.jsonc` artifact from the working tree.
|
||||||
|
|
||||||
|
<!-- SECTION:FINAL_SUMMARY:END -->
|
||||||
@@ -0,0 +1,69 @@
|
|||||||
|
---
|
||||||
|
id: TASK-106
|
||||||
|
title: Add first-run setup gate and auto-install flow
|
||||||
|
status: Done
|
||||||
|
assignee:
|
||||||
|
- codex
|
||||||
|
created_date: '2026-03-07 06:10'
|
||||||
|
updated_date: '2026-03-07 06:20'
|
||||||
|
labels: []
|
||||||
|
dependencies: []
|
||||||
|
references:
|
||||||
|
- /home/sudacode/projects/japanese/SubMiner/src/main.ts
|
||||||
|
- /home/sudacode/projects/japanese/SubMiner/src/shared/setup-state.ts
|
||||||
|
- /home/sudacode/projects/japanese/SubMiner/src/main/runtime/first-run-setup-service.ts
|
||||||
|
- /home/sudacode/projects/japanese/SubMiner/src/main/runtime/first-run-setup-window.ts
|
||||||
|
- /home/sudacode/projects/japanese/SubMiner/launcher/commands/playback-command.ts
|
||||||
|
priority: high
|
||||||
|
ordinal: 10600
|
||||||
|
---
|
||||||
|
|
||||||
|
## Description
|
||||||
|
|
||||||
|
<!-- SECTION:DESCRIPTION:BEGIN -->
|
||||||
|
Replace the current manual install flow with a first-run setup gate:
|
||||||
|
|
||||||
|
- bootstrap the default config dir/config file automatically
|
||||||
|
- detect legacy installs and mark them complete when config + Yomitan dictionaries are already present
|
||||||
|
- open a compact Catppuccin Macchiato setup popup for incomplete installs
|
||||||
|
- optionally install the mpv plugin into the default mpv location
|
||||||
|
- block launcher playback until setup completes, then resume the original playback flow
|
||||||
|
<!-- SECTION:DESCRIPTION:END -->
|
||||||
|
|
||||||
|
## Acceptance Criteria
|
||||||
|
|
||||||
|
<!-- AC:BEGIN -->
|
||||||
|
- [x] #1 First app launch seeds the default config dir/config file without manual copy steps.
|
||||||
|
- [x] #2 Existing installs with config plus at least one Yomitan dictionary are auto-detected as already complete.
|
||||||
|
- [x] #3 Incomplete installs get a first-run setup popup with mpv plugin install, Yomitan settings, refresh, skip, and finish actions.
|
||||||
|
- [x] #4 Launcher playback waits for setup completion and does not start mpv while setup is incomplete.
|
||||||
|
- [x] #5 Plugin assets are packaged into the Electron bundle and regression tests cover the new flow.
|
||||||
|
<!-- AC:END -->
|
||||||
|
|
||||||
|
## Implementation Notes
|
||||||
|
|
||||||
|
<!-- SECTION:NOTES:BEGIN -->
|
||||||
|
Added shared setup-state/config/mpv path helpers so Electron and launcher read the same onboarding state file.
|
||||||
|
|
||||||
|
Introduced a first-run setup service plus compact BrowserWindow popup using Catppuccin Macchiato styling. The popup supports optional mpv plugin install, opening Yomitan settings, status refresh, skip-plugin, and gated finish once at least one Yomitan dictionary is installed.
|
||||||
|
|
||||||
|
Electron startup now bootstraps a default config file, auto-detects legacy-complete installs, adds `--setup` CLI support, exposes a tray `Complete Setup` action while incomplete, and avoids reopening setup once completion is recorded.
|
||||||
|
|
||||||
|
Launcher playback now checks the shared setup-state file before starting mpv. If setup is incomplete, it launches the app with `--background --setup`, waits for completion, and only then proceeds.
|
||||||
|
|
||||||
|
Verification:
|
||||||
|
|
||||||
|
- `bun run typecheck`
|
||||||
|
- `bun run test:fast`
|
||||||
|
<!-- SECTION:NOTES:END -->
|
||||||
|
|
||||||
|
## Final Summary
|
||||||
|
|
||||||
|
<!-- SECTION:FINAL_SUMMARY:BEGIN -->
|
||||||
|
SubMiner now supports a download-and-launch install flow.
|
||||||
|
|
||||||
|
- First launch auto-creates config and opens setup only when needed.
|
||||||
|
- Existing users with working installs are silently migrated to completed setup.
|
||||||
|
- The setup popup handles optional mpv plugin install and Yomitan dictionary readiness.
|
||||||
|
- Launcher playback is gated on setup completion and resumes automatically afterward.
|
||||||
|
<!-- SECTION:FINAL_SUMMARY:END -->
|
||||||
@@ -0,0 +1,42 @@
|
|||||||
|
---
|
||||||
|
id: TASK-107
|
||||||
|
title: 'Fix Yomitan scan-token fallback fragmentation on exact-source misses'
|
||||||
|
status: Done
|
||||||
|
assignee: []
|
||||||
|
created_date: '2026-03-07 01:10'
|
||||||
|
updated_date: '2026-03-07 01:12'
|
||||||
|
labels: []
|
||||||
|
dependencies: []
|
||||||
|
priority: high
|
||||||
|
ordinal: 9007
|
||||||
|
---
|
||||||
|
|
||||||
|
## Description
|
||||||
|
|
||||||
|
<!-- SECTION:DESCRIPTION:BEGIN -->
|
||||||
|
|
||||||
|
Left-to-right Yomitan scanning can emit bogus fallback tokens when `termsFind` returns entries but none of their headwords carries an exact primary source for the consumed substring. Repro: `だが それでも届かぬ高みがあった` currently yields trailing fragments like `があ` / `た`, which blocks the real `あった` token from receiving frequency highlighting.
|
||||||
|
|
||||||
|
<!-- SECTION:DESCRIPTION:END -->
|
||||||
|
|
||||||
|
## Acceptance Criteria
|
||||||
|
|
||||||
|
<!-- AC:BEGIN -->
|
||||||
|
|
||||||
|
- [x] #1 Scanner skips `termsFind` fallback entries that are not backed by an exact primary source for the consumed substring.
|
||||||
|
- [x] #2 Repro line no longer yields bogus trailing fragments such as `があ`.
|
||||||
|
- [x] #3 Regression coverage added for the scan-token path.
|
||||||
|
|
||||||
|
<!-- AC:END -->
|
||||||
|
|
||||||
|
## Final Summary
|
||||||
|
|
||||||
|
<!-- SECTION:FINAL_SUMMARY:BEGIN -->
|
||||||
|
|
||||||
|
Removed the scan-token helper fallback that previously emitted a token from the first returned headword even when Yomitan did not report an exact primary source for the consumed substring. Added a focused regression test covering `だが それでも届かぬ高みがあった`, ensuring bogus `があ` fragmentation is skipped so the later `あった` exact match can still be tokenized and highlighted.
|
||||||
|
|
||||||
|
Verification:
|
||||||
|
|
||||||
|
- `bun test src/core/services/tokenizer/yomitan-parser-runtime.test.ts src/core/services/tokenizer.test.ts --timeout 20000`
|
||||||
|
|
||||||
|
<!-- SECTION:FINAL_SUMMARY:END -->
|
||||||
@@ -0,0 +1,43 @@
|
|||||||
|
---
|
||||||
|
id: TASK-108
|
||||||
|
title: 'Exclude single kana tokens from frequency highlighting'
|
||||||
|
status: Done
|
||||||
|
assignee: []
|
||||||
|
created_date: '2026-03-07 01:18'
|
||||||
|
updated_date: '2026-03-07 01:22'
|
||||||
|
labels: []
|
||||||
|
dependencies: []
|
||||||
|
priority: medium
|
||||||
|
ordinal: 9008
|
||||||
|
---
|
||||||
|
|
||||||
|
## Description
|
||||||
|
|
||||||
|
<!-- SECTION:DESCRIPTION:BEGIN -->
|
||||||
|
|
||||||
|
Suppress frequency highlighting for single-character hiragana or katakana tokens. Scope is frequency-only: known/N+1/JLPT behavior stays unchanged.
|
||||||
|
|
||||||
|
<!-- SECTION:DESCRIPTION:END -->
|
||||||
|
|
||||||
|
## Acceptance Criteria
|
||||||
|
|
||||||
|
<!-- AC:BEGIN -->
|
||||||
|
|
||||||
|
- [x] #1 Single-character hiragana tokens do not retain `frequencyRank`.
|
||||||
|
- [x] #2 Single-character katakana tokens do not retain `frequencyRank`.
|
||||||
|
- [x] #3 Regression coverage exists at annotation-stage and tokenizer levels.
|
||||||
|
|
||||||
|
<!-- AC:END -->
|
||||||
|
|
||||||
|
## Final Summary
|
||||||
|
|
||||||
|
<!-- SECTION:FINAL_SUMMARY:BEGIN -->
|
||||||
|
|
||||||
|
Added a frequency-only suppression rule for single-character kana tokens based on token `surface`, so bogus merged fragments like `た` and standalone one-character kana no longer keep `frequencyRank`. Regression coverage now exists both in the annotation stage and in the tokenizer path, while multi-character tokens and N+1/JLPT behavior remain unchanged.
|
||||||
|
|
||||||
|
Verification:
|
||||||
|
|
||||||
|
- `bun test src/core/services/tokenizer/annotation-stage.test.ts --timeout 20000`
|
||||||
|
- `bun test src/core/services/tokenizer.test.ts --timeout 20000`
|
||||||
|
|
||||||
|
<!-- SECTION:FINAL_SUMMARY:END -->
|
||||||
@@ -0,0 +1,51 @@
|
|||||||
|
---
|
||||||
|
id: TASK-110
|
||||||
|
title: Replace vendored Yomitan with submodule-built Chrome artifact workflow
|
||||||
|
status: Done
|
||||||
|
assignee: []
|
||||||
|
created_date: '2026-03-07 11:05'
|
||||||
|
updated_date: '2026-03-07 11:22'
|
||||||
|
labels:
|
||||||
|
- yomitan
|
||||||
|
- build
|
||||||
|
- release
|
||||||
|
dependencies: []
|
||||||
|
priority: high
|
||||||
|
ordinal: 9010
|
||||||
|
---
|
||||||
|
|
||||||
|
## Description
|
||||||
|
|
||||||
|
<!-- SECTION:DESCRIPTION:BEGIN -->
|
||||||
|
|
||||||
|
Replace the checked-in `vendor/yomitan` release tree with a `subminer-yomitan` git submodule. Build Yomitan from source, extract the Chromium zip artifact into a stable local build directory, and make SubMiner dev/runtime/tests/release packaging load that extracted extension instead of the source tree or vendored files.
|
||||||
|
|
||||||
|
<!-- SECTION:DESCRIPTION:END -->
|
||||||
|
|
||||||
|
## Acceptance Criteria
|
||||||
|
|
||||||
|
<!-- AC:BEGIN -->
|
||||||
|
|
||||||
|
- [x] #1 Repo tracks Yomitan as a git submodule instead of committed extension files under `vendor/yomitan`.
|
||||||
|
- [x] #2 SubMiner has a reproducible build/extract step that produces a local Chromium extension directory from `subminer-yomitan`.
|
||||||
|
- [x] #3 Dev/runtime/tests resolve the extracted build output as the default Yomitan extension path.
|
||||||
|
- [x] #4 Release packaging includes the extracted Chromium extension files instead of the old vendored tree.
|
||||||
|
- [x] #5 Docs and verification commands reflect the new workflow.
|
||||||
|
|
||||||
|
<!-- AC:END -->
|
||||||
|
|
||||||
|
## Final Summary
|
||||||
|
|
||||||
|
<!-- SECTION:FINAL_SUMMARY:BEGIN -->
|
||||||
|
|
||||||
|
Replaced the checked-in `vendor/yomitan` extension tree with a `vendor/subminer-yomitan` git submodule and added a reproducible `bun run build:yomitan` workflow that builds `yomitan-chrome.zip`, extracts it into `build/yomitan`, and reuses a source-state stamp to skip redundant rebuilds. Runtime path resolution, helper CLIs, Yomitan integration tests, packaging, CI cache keys, and README source-build notes now all target that generated artifact instead of the old vendored files.
|
||||||
|
|
||||||
|
Verification:
|
||||||
|
|
||||||
|
- `bun run build:yomitan`
|
||||||
|
- `bun test src/core/services/yomitan-extension-paths.test.ts src/core/services/yomitan-structured-content-generator.test.ts src/yomitan-translator-sort.test.ts`
|
||||||
|
- `bun run typecheck`
|
||||||
|
- `bun run build`
|
||||||
|
- `bun run test:core:src`
|
||||||
|
|
||||||
|
<!-- SECTION:FINAL_SUMMARY:END -->
|
||||||
@@ -0,0 +1,71 @@
|
|||||||
|
---
|
||||||
|
id: TASK-111
|
||||||
|
title: Fix subtitle-cycle OSD labels for J keybindings
|
||||||
|
status: Done
|
||||||
|
assignee:
|
||||||
|
- Codex
|
||||||
|
created_date: '2026-03-07 23:45'
|
||||||
|
updated_date: '2026-03-08 00:06'
|
||||||
|
labels: []
|
||||||
|
dependencies: []
|
||||||
|
references:
|
||||||
|
- /Users/sudacode/projects/japanese/SubMiner/src/core/services/ipc-command.ts
|
||||||
|
- /Users/sudacode/projects/japanese/SubMiner/src/core/services/mpv.ts
|
||||||
|
- >-
|
||||||
|
/Users/sudacode/projects/japanese/SubMiner/src/core/services/ipc-command.test.ts
|
||||||
|
- >-
|
||||||
|
/Users/sudacode/projects/japanese/SubMiner/src/core/services/mpv-control.test.ts
|
||||||
|
---
|
||||||
|
|
||||||
|
## Description
|
||||||
|
|
||||||
|
<!-- SECTION:DESCRIPTION:BEGIN -->
|
||||||
|
When cycling subtitle tracks with the default J/Shift+J keybindings, the mpv OSD currently shows raw template text like `${sid}` instead of a resolved subtitle label. Update the keybinding OSD behavior so users see the active subtitle selection clearly when cycling tracks, and ensure placeholder-based OSD messages sent through the mpv client API render correctly.
|
||||||
|
<!-- SECTION:DESCRIPTION:END -->
|
||||||
|
|
||||||
|
## Acceptance Criteria
|
||||||
|
<!-- AC:BEGIN -->
|
||||||
|
- [x] #1 Pressing the primary subtitle cycle keybinding shows a resolved subtitle label on the OSD instead of a raw `${sid}` placeholder.
|
||||||
|
- [x] #2 Pressing the secondary subtitle cycle keybinding shows a resolved subtitle label on the OSD instead of a raw `${secondary-sid}` placeholder.
|
||||||
|
- [x] #3 Proxy OSD messages that rely on mpv property expansion render resolved values when sent through the mpv client API.
|
||||||
|
- [x] #4 Regression tests cover the subtitle-cycle OSD behavior and the placeholder-expansion OSD path.
|
||||||
|
<!-- AC:END -->
|
||||||
|
|
||||||
|
## Implementation Plan
|
||||||
|
|
||||||
|
<!-- SECTION:PLAN:BEGIN -->
|
||||||
|
1. Add focused failing tests for subtitle-cycle OSD labels and mpv placeholder-expansion behavior.
|
||||||
|
2. Update the IPC mpv command handler to resolve primary and secondary subtitle track labels from mpv `track-list` data after cycling subtitle tracks.
|
||||||
|
3. Update the mpv OSD runtime path so placeholder-based `show-text` messages sent through the client API opt into property expansion.
|
||||||
|
4. Run focused tests, then the relevant core test lane, and record results in the task notes.
|
||||||
|
<!-- SECTION:PLAN:END -->
|
||||||
|
|
||||||
|
## Implementation Notes
|
||||||
|
|
||||||
|
<!-- SECTION:NOTES:BEGIN -->
|
||||||
|
Initial triage: `ipc-command.ts` emits raw `${sid}`/`${secondary-sid}` placeholder strings, and `showMpvOsdRuntime` sends `show-text` via the mpv client API without enabling property expansion.
|
||||||
|
|
||||||
|
User approved implementation plan on 2026-03-07.
|
||||||
|
|
||||||
|
Implementation: proxy mpv command OSD now supports an async resolver so subtitle track cycling can show human-readable labels instead of raw `${sid}` placeholders.
|
||||||
|
|
||||||
|
Implementation: `showMpvOsdRuntime` now prefixes placeholder-based messages with mpv client-api `expand-properties`, which fixes raw `${...}` OSD output for subtitle delay/position messages.
|
||||||
|
|
||||||
|
Testing: `bun test src/core/services/ipc-command.test.ts src/core/services/mpv-control.test.ts src/main/runtime/mpv-proxy-osd.test.ts src/main/runtime/ipc-mpv-command-main-deps.test.ts src/main/runtime/ipc-bridge-actions.test.ts src/main/runtime/ipc-bridge-actions-main-deps.test.ts src/main/runtime/composers/ipc-runtime-composer.test.ts` passed.
|
||||||
|
|
||||||
|
Testing: `bun x tsc --noEmit` passed.
|
||||||
|
|
||||||
|
Testing: `bun run test:core:src` passed (423 pass, 6 skip, 0 fail).
|
||||||
|
|
||||||
|
Docs: no update required because no checked-in docs or help text describe the J/Shift+J OSD output behavior.
|
||||||
|
<!-- SECTION:NOTES:END -->
|
||||||
|
|
||||||
|
## Final Summary
|
||||||
|
|
||||||
|
<!-- SECTION:FINAL_SUMMARY:BEGIN -->
|
||||||
|
Fixed subtitle-cycle OSD handling for the default J/Shift+J keybindings. The IPC mpv command path now supports resolving proxy OSD text asynchronously, and the main-runtime resolver reads mpv `track-list` state so primary and secondary subtitle cycling show human-readable track labels instead of raw `${sid}` / `${secondary-sid}` placeholders.
|
||||||
|
|
||||||
|
Also fixed the lower-level mpv OSD transport so placeholder-based `show-text` messages sent through the client API opt into `expand-properties`. That preserves existing template-based OSD messages like subtitle delay and subtitle position without leaking the raw `${...}` syntax.
|
||||||
|
|
||||||
|
Added regression coverage for the async proxy OSD path, the placeholder-expansion `showMpvOsdRuntime` path, and the runtime subtitle-track label resolver. Verification run: `bun x tsc --noEmit`; focused mpv/IPC tests; and the maintained `bun run test:core:src` lane (423 pass, 6 skip, 0 fail).
|
||||||
|
<!-- SECTION:FINAL_SUMMARY:END -->
|
||||||
@@ -0,0 +1,61 @@
|
|||||||
|
---
|
||||||
|
id: TASK-112
|
||||||
|
title: Address Claude review items on PR 15
|
||||||
|
status: Done
|
||||||
|
assignee:
|
||||||
|
- codex
|
||||||
|
created_date: '2026-03-08 00:11'
|
||||||
|
updated_date: '2026-03-08 00:12'
|
||||||
|
labels:
|
||||||
|
- pr-review
|
||||||
|
- ci
|
||||||
|
dependencies: []
|
||||||
|
references:
|
||||||
|
- .github/workflows/release.yml
|
||||||
|
- .github/workflows/ci.yml
|
||||||
|
- .gitmodules
|
||||||
|
- >-
|
||||||
|
backlog/tasks/task-101 -
|
||||||
|
Index-AniList-character-alternative-names-in-the-character-dictionary.md
|
||||||
|
priority: medium
|
||||||
|
---
|
||||||
|
|
||||||
|
## Description
|
||||||
|
|
||||||
|
<!-- SECTION:DESCRIPTION:BEGIN -->
|
||||||
|
Review Claude's PR feedback on PR #15, implement only the technically valid fixes on the current branch, and document which comments are non-actionable or already acceptable.
|
||||||
|
<!-- SECTION:DESCRIPTION:END -->
|
||||||
|
|
||||||
|
## Acceptance Criteria
|
||||||
|
<!-- AC:BEGIN -->
|
||||||
|
- [x] #1 Validated Claude's concrete PR review items against current branch state and repo conventions
|
||||||
|
- [x] #2 Implemented the accepted fixes with regression coverage or verification where applicable
|
||||||
|
- [x] #3 Documented which review items are non-blocking or intentionally left unchanged
|
||||||
|
<!-- AC:END -->
|
||||||
|
|
||||||
|
## Implementation Plan
|
||||||
|
|
||||||
|
<!-- SECTION:PLAN:BEGIN -->
|
||||||
|
1. Validate each Claude review item against current branch files and repo workflow.
|
||||||
|
2. Patch release quality-gate to match CI ordering and add explicit typecheck.
|
||||||
|
3. Remove duplicate .gitmodules stanza and normalize the TASK-101 reference path through Backlog MCP.
|
||||||
|
4. Run relevant verification for workflow/config metadata changes and record which review items remain non-actionable.
|
||||||
|
<!-- SECTION:PLAN:END -->
|
||||||
|
|
||||||
|
## Implementation Notes
|
||||||
|
|
||||||
|
<!-- SECTION:NOTES:BEGIN -->
|
||||||
|
User asked to address Claude PR comments on PR #15 and assess whether any action items remain. Treat review suggestions skeptically; only fix validated defects.
|
||||||
|
|
||||||
|
Validated Claude's five review items. Fixed release workflow ordering/typecheck, removed the duplicate .gitmodules entry, and normalized TASK-101 references to repo-relative paths via Backlog MCP.
|
||||||
|
|
||||||
|
Left the vendor/subminer-yomitan branch-pin suggestion unchanged. The committed submodule SHA already controls reproducibility; adding a branch would only affect update ergonomics and was not required to address a concrete defect.
|
||||||
|
<!-- SECTION:NOTES:END -->
|
||||||
|
|
||||||
|
## Final Summary
|
||||||
|
|
||||||
|
<!-- SECTION:FINAL_SUMMARY:BEGIN -->
|
||||||
|
Validated Claude's PR #15 review summary against the current branch and applied the actionable fixes. In `.github/workflows/release.yml`, the release `quality-gate` job now restores the dependency cache before installation, no longer installs twice, and runs `bun run typecheck` before the fast test suite to match CI expectations. In `.gitmodules`, removed the duplicate `vendor/yomitan-jlpt-vocab` stanza with the conflicting duplicate path. Through Backlog MCP, updated `TASK-101` references from an absolute local path to repo-relative paths so the task metadata is portable across contributors.
|
||||||
|
|
||||||
|
Verification: `git diff --check`, `git config -f .gitmodules --get-regexp '^submodule\..*\.path$'`, `bun run typecheck`, and `bun run test:fast` all passed. `bun run format:check` still fails on many pre-existing unrelated files already present on the branch, including multiple backlog task files and existing source/docs files; this review patch did not attempt a repo-wide formatting sweep.
|
||||||
|
<!-- SECTION:FINAL_SUMMARY:END -->
|
||||||
@@ -0,0 +1,59 @@
|
|||||||
|
---
|
||||||
|
id: TASK-113
|
||||||
|
title: Scope make pretty to maintained source files
|
||||||
|
status: Done
|
||||||
|
assignee:
|
||||||
|
- codex
|
||||||
|
created_date: '2026-03-08 00:20'
|
||||||
|
updated_date: '2026-03-08 00:22'
|
||||||
|
labels:
|
||||||
|
- tooling
|
||||||
|
- formatting
|
||||||
|
dependencies: []
|
||||||
|
references:
|
||||||
|
- Makefile
|
||||||
|
- package.json
|
||||||
|
priority: medium
|
||||||
|
---
|
||||||
|
|
||||||
|
## Description
|
||||||
|
|
||||||
|
<!-- SECTION:DESCRIPTION:BEGIN -->
|
||||||
|
Change the `make pretty` workflow so it formats only the maintained source/config files we intentionally keep under Prettier, instead of sweeping backlog/docs/generated content across the whole repository.
|
||||||
|
<!-- SECTION:DESCRIPTION:END -->
|
||||||
|
|
||||||
|
## Acceptance Criteria
|
||||||
|
<!-- AC:BEGIN -->
|
||||||
|
- [x] #1 `make pretty` formats only the approved maintained source/config paths
|
||||||
|
- [x] #2 The allowlist is reusable for check/write flows instead of duplicating path logic
|
||||||
|
- [x] #3 Verification shows the scoped formatting command targets the intended files without touching backlog or vendored content
|
||||||
|
<!-- AC:END -->
|
||||||
|
|
||||||
|
## Implementation Plan
|
||||||
|
|
||||||
|
<!-- SECTION:PLAN:BEGIN -->
|
||||||
|
1. Inspect current Prettier config/ignore behavior and keep the broad repo-wide format command unchanged.
|
||||||
|
2. Add a reusable scoped Prettier script that targets maintained source/config paths only.
|
||||||
|
3. Update `make pretty` to call the scoped script.
|
||||||
|
4. Verify the scoped command resolves only intended files and does not traverse backlog or vendor paths.
|
||||||
|
<!-- SECTION:PLAN:END -->
|
||||||
|
|
||||||
|
## Implementation Notes
|
||||||
|
|
||||||
|
<!-- SECTION:NOTES:BEGIN -->
|
||||||
|
User approved the allowlist approach: keep repo-wide `format` intact, make `make pretty` use a maintained-path formatter scope.
|
||||||
|
|
||||||
|
Added `scripts/prettier-scope.sh` as the single allowlist for scoped Prettier paths and wired `format:src` / `format:check:src` to it.
|
||||||
|
|
||||||
|
Updated `make pretty` to call `bun run format:src`. Verified with `make -n pretty` and shell tracing that the helper only targets the maintained allowlist and does not traverse `backlog/` or `vendor/`.
|
||||||
|
|
||||||
|
Excluded `Makefile` and `.prettierignore` from the allowlist after verification showed Prettier cannot infer parsers for them.
|
||||||
|
<!-- SECTION:NOTES:END -->
|
||||||
|
|
||||||
|
## Final Summary
|
||||||
|
|
||||||
|
<!-- SECTION:FINAL_SUMMARY:BEGIN -->
|
||||||
|
Scoped the repo's day-to-day formatting entrypoint without changing the existing broad repo-wide Prettier scripts. Added `scripts/prettier-scope.sh` as the shared allowlist for maintained source/config paths (`.github`, `build`, `launcher`, `scripts`, `src`, plus selected root JSON config files), added `format:src` and `format:check:src` in `package.json`, and updated `make pretty` to run the scoped formatter.
|
||||||
|
|
||||||
|
Verification: `make -n pretty` now resolves to `bun run format:src`. `bash -n scripts/prettier-scope.sh` passed, and shell-traced `bash -x scripts/prettier-scope.sh --check` confirmed the exact allowlist passed to Prettier. `bun run format:check:src` fails only because existing files inside the allowed source scope are not currently formatted; it no longer touches `backlog/` or `vendor/`.
|
||||||
|
<!-- SECTION:FINAL_SUMMARY:END -->
|
||||||
62
backlog/tasks/task-114 - Fix-failing-CI-checks-on-PR-15.md
Normal file
62
backlog/tasks/task-114 - Fix-failing-CI-checks-on-PR-15.md
Normal file
@@ -0,0 +1,62 @@
|
|||||||
|
---
|
||||||
|
id: TASK-114
|
||||||
|
title: Fix failing CI checks on PR 15
|
||||||
|
status: Done
|
||||||
|
assignee:
|
||||||
|
- codex
|
||||||
|
created_date: '2026-03-08 00:34'
|
||||||
|
updated_date: '2026-03-08 00:37'
|
||||||
|
labels:
|
||||||
|
- ci
|
||||||
|
- test
|
||||||
|
dependencies: []
|
||||||
|
references:
|
||||||
|
- src/renderer/subtitle-render.test.ts
|
||||||
|
- src/renderer/style.css
|
||||||
|
- .github/workflows/ci.yml
|
||||||
|
priority: high
|
||||||
|
---
|
||||||
|
|
||||||
|
## Description
|
||||||
|
|
||||||
|
<!-- SECTION:DESCRIPTION:BEGIN -->
|
||||||
|
Investigate the failing GitHub Actions CI run for PR #15 on branch `yomitan-fork`, fix the underlying test or code regression, and verify the affected local test/CI lane passes.
|
||||||
|
<!-- SECTION:DESCRIPTION:END -->
|
||||||
|
|
||||||
|
## Acceptance Criteria
|
||||||
|
<!-- AC:BEGIN -->
|
||||||
|
- [x] #1 Identified the concrete failing CI job and captured the relevant failure context
|
||||||
|
- [x] #2 Implemented the minimal code or test change needed to resolve the CI failure
|
||||||
|
- [x] #3 Verified the affected local test target and the broader fast CI test lane pass
|
||||||
|
<!-- AC:END -->
|
||||||
|
|
||||||
|
## Implementation Plan
|
||||||
|
|
||||||
|
<!-- SECTION:PLAN:BEGIN -->
|
||||||
|
1. Inspect the failing GitHub Actions run and confirm the exact failing test/assertion.
|
||||||
|
2. Reproduce the failing renderer stylesheet test locally and compare the assertion against current CSS.
|
||||||
|
3. Apply the minimal test or stylesheet fix needed to restore the intended hover/selection behavior.
|
||||||
|
4. Re-run the targeted renderer test, then re-run `bun run test` to verify the fast CI lane is green.
|
||||||
|
<!-- SECTION:PLAN:END -->
|
||||||
|
|
||||||
|
## Implementation Notes
|
||||||
|
|
||||||
|
<!-- SECTION:NOTES:BEGIN -->
|
||||||
|
GitHub Actions run 22810400921 failed in job build-test-audit, step `Test suite (source)`, with a single failing test: `JLPT CSS rules use underline-only styling in renderer stylesheet` in src/renderer/subtitle-render.test.ts.
|
||||||
|
|
||||||
|
Reproduced the failing test locally with `bun test src/renderer/subtitle-render.test.ts`. The failure was a brittle stylesheet assertion, not a renderer behavior regression.
|
||||||
|
|
||||||
|
Updated the renderer stylesheet test helper to split selectors safely across `:is(...)` commas and normalize multiline selector whitespace, then switched the failing hover/JLPT assertions to inspect extracted rule blocks instead of matching the entire CSS file text.
|
||||||
|
|
||||||
|
Verification passed with `bun test src/renderer/subtitle-render.test.ts` and `bun run test`.
|
||||||
|
<!-- SECTION:NOTES:END -->
|
||||||
|
|
||||||
|
## Final Summary
|
||||||
|
|
||||||
|
<!-- SECTION:FINAL_SUMMARY:BEGIN -->
|
||||||
|
Investigated GitHub Actions CI run `22810400921` for PR #15 and confirmed the only failing job was `build-test-audit`, step `Test suite (source)`, with a single failure in `src/renderer/subtitle-render.test.ts` (`JLPT CSS rules use underline-only styling in renderer stylesheet`).
|
||||||
|
|
||||||
|
The renderer CSS itself was still correct; the regression was in the test helper. `extractClassBlock` was splitting selector lists on every comma, which breaks selectors containing `:is(...)`, and the affected assertions fell back to brittle whole-file regex matching against a multiline selector. Fixed the test by teaching the helper to split selectors only at top-level commas, normalizing selector whitespace around multiline `:not(...)` / `:is(...)` clauses, and asserting on extracted rule blocks for the plain-word hover and JLPT-only hover/selection rules.
|
||||||
|
|
||||||
|
Verification: `bun test src/renderer/subtitle-render.test.ts` passed, and `bun run test` passed end to end (the same fast lane that failed in CI).
|
||||||
|
<!-- SECTION:FINAL_SUMMARY:END -->
|
||||||
@@ -0,0 +1,59 @@
|
|||||||
|
---
|
||||||
|
id: TASK-115
|
||||||
|
title: Refresh subminer-docs contributor docs for current repo workflow
|
||||||
|
status: Done
|
||||||
|
assignee:
|
||||||
|
- codex
|
||||||
|
created_date: '2026-03-08 00:40'
|
||||||
|
updated_date: '2026-03-08 00:42'
|
||||||
|
labels:
|
||||||
|
- docs
|
||||||
|
dependencies: []
|
||||||
|
references:
|
||||||
|
- ../subminer-docs/development.md
|
||||||
|
- ../subminer-docs/README.md
|
||||||
|
- Makefile
|
||||||
|
- package.json
|
||||||
|
priority: medium
|
||||||
|
---
|
||||||
|
|
||||||
|
## Description
|
||||||
|
|
||||||
|
<!-- SECTION:DESCRIPTION:BEGIN -->
|
||||||
|
Update the sibling `subminer-docs` repo so contributor/development docs match the current SubMiner repo workflow after the docs split and recent tooling changes, including removing stale in-repo docs build steps and documenting the scoped formatting command.
|
||||||
|
<!-- SECTION:DESCRIPTION:END -->
|
||||||
|
|
||||||
|
## Acceptance Criteria
|
||||||
|
<!-- AC:BEGIN -->
|
||||||
|
- [x] #1 Contributor docs in `subminer-docs` no longer reference stale in-repo docs build commands for the app repo
|
||||||
|
- [x] #2 Contributor docs mention the current scoped formatting workflow (`make pretty` / `format:src`) where relevant
|
||||||
|
- [x] #3 Removed stale or no-longer-needed instructions that no longer match the current repo layout
|
||||||
|
<!-- AC:END -->
|
||||||
|
|
||||||
|
## Implementation Plan
|
||||||
|
|
||||||
|
<!-- SECTION:PLAN:BEGIN -->
|
||||||
|
1. Inspect `subminer-docs` for contributor/development instructions that drifted after the docs repo split and recent tooling changes.
|
||||||
|
2. Update contributor docs to remove stale app-repo docs commands and document the current scoped formatting workflow.
|
||||||
|
3. Verify the modified docs page and build the docs site from the sibling docs repo when local dependencies are available.
|
||||||
|
<!-- SECTION:PLAN:END -->
|
||||||
|
|
||||||
|
## Implementation Notes
|
||||||
|
|
||||||
|
<!-- SECTION:NOTES:BEGIN -->
|
||||||
|
Detected concrete doc drift in `subminer-docs/development.md`: stale in-repo docs build commands and no mention of the scoped `make pretty` formatter.
|
||||||
|
|
||||||
|
Updated `../subminer-docs/development.md` to remove stale app-repo docs build steps from the local gate, document `make pretty` / `format:check:src`, and point docs-site work to the sibling docs repo explicitly.
|
||||||
|
|
||||||
|
Installed docs repo dependencies locally with `bun install` and verified the docs site with `bun run docs:build` in `../subminer-docs`.
|
||||||
|
|
||||||
|
Did not change `../subminer-docs/README.md`; it was already accurate for the docs repo itself.
|
||||||
|
<!-- SECTION:NOTES:END -->
|
||||||
|
|
||||||
|
## Final Summary
|
||||||
|
|
||||||
|
<!-- SECTION:FINAL_SUMMARY:BEGIN -->
|
||||||
|
Refreshed the contributor/development docs in the sibling `subminer-docs` repo to match the current SubMiner workflow. In `development.md`, removed the stale app-repo `bun run docs:build` step from the local CI-equivalent gate, added an explicit note to run docs builds from `../subminer-docs` when docs change, documented the scoped formatting workflow (`make pretty` and `bun run format:check:src`), and replaced the old in-repo `make docs*` instructions with the correct sibling-repo `bun run docs:*` commands. Also updated the Makefile reference to include `make pretty` and removed the obsolete `make docs-dev` entry.
|
||||||
|
|
||||||
|
Verification: installed docs repo dependencies with `bun install` in `../subminer-docs` and ran `bun run docs:build` successfully. Left `README.md` unchanged because it was already accurate for the standalone docs repo.
|
||||||
|
<!-- SECTION:FINAL_SUMMARY:END -->
|
||||||
@@ -0,0 +1,53 @@
|
|||||||
|
---
|
||||||
|
id: TASK-116
|
||||||
|
title: Audit branch commits for remaining subminer-docs updates
|
||||||
|
status: Done
|
||||||
|
assignee:
|
||||||
|
- codex
|
||||||
|
created_date: '2026-03-08 00:46'
|
||||||
|
updated_date: '2026-03-08 00:48'
|
||||||
|
labels:
|
||||||
|
- docs
|
||||||
|
dependencies: []
|
||||||
|
references:
|
||||||
|
- ../subminer-docs/installation.md
|
||||||
|
- ../subminer-docs/troubleshooting.md
|
||||||
|
- src/core/services/yomitan-extension-paths.ts
|
||||||
|
- scripts/build-yomitan.mjs
|
||||||
|
priority: medium
|
||||||
|
---
|
||||||
|
|
||||||
|
## Description
|
||||||
|
|
||||||
|
<!-- SECTION:DESCRIPTION:BEGIN -->
|
||||||
|
Review recent `yomitan-fork` commits against the sibling `subminer-docs` repo, identify any concrete documentation drift that remains after the earlier contributor-doc updates, and patch the docs for behavior/tooling changes that are now outdated or misleading.
|
||||||
|
<!-- SECTION:DESCRIPTION:END -->
|
||||||
|
|
||||||
|
## Acceptance Criteria
|
||||||
|
<!-- AC:BEGIN -->
|
||||||
|
- [x] #1 Reviewed recent branch commits for user-facing or contributor-facing changes that may require docs updates
|
||||||
|
- [x] #2 Updated `subminer-docs` pages where branch changes introduced concrete doc drift
|
||||||
|
- [x] #3 Verified the docs site still builds after the updates
|
||||||
|
<!-- AC:END -->
|
||||||
|
|
||||||
|
## Implementation Plan
|
||||||
|
|
||||||
|
<!-- SECTION:PLAN:BEGIN -->
|
||||||
|
1. Review branch commit themes against `subminer-docs` and identify only concrete drift introduced by recent workflow/runtime changes.
|
||||||
|
2. Patch docs for the Yomitan submodule build workflow, updated source-build prerequisites, and current runtime Yomitan search paths/manual fallback path.
|
||||||
|
3. Rebuild the docs site to verify the updated pages render cleanly.
|
||||||
|
<!-- SECTION:PLAN:END -->
|
||||||
|
|
||||||
|
## Implementation Notes
|
||||||
|
|
||||||
|
<!-- SECTION:NOTES:BEGIN -->
|
||||||
|
Concrete remaining drift after commit audit: installation/development docs still understate the Node/npm + submodule requirements for the Yomitan build flow, and troubleshooting still points at obsolete `vendor/yomitan` / `extensions/yomitan` paths.
|
||||||
|
|
||||||
|
Audited branch commits against subminer-docs coverage. Existing docs already cover first-run setup, texthooker startup/annotated websocket config, AniList merged character dictionaries, configurable collapsible sections, and subtitle name highlighting. Patched remaining drift around source-build prerequisites and Yomitan build/install paths in installation.md, development.md, and troubleshooting.md. Verified with `bun run docs:build` in ../subminer-docs.
|
||||||
|
<!-- SECTION:NOTES:END -->
|
||||||
|
|
||||||
|
## Final Summary
|
||||||
|
|
||||||
|
<!-- SECTION:FINAL_SUMMARY:BEGIN -->
|
||||||
|
Audited branch commits for missing documentation updates in ../subminer-docs. Updated installation, development, and troubleshooting docs to match the current Yomitan submodule build flow, source-build prerequisites, and runtime extension search/manual fallback paths. Confirmed other recent branch features were already documented and rebuilt the docs site successfully.
|
||||||
|
<!-- SECTION:FINAL_SUMMARY:END -->
|
||||||
@@ -0,0 +1,38 @@
|
|||||||
|
---
|
||||||
|
id: TASK-99
|
||||||
|
title: Add configurable character dictionary collapsible section open states
|
||||||
|
status: Done
|
||||||
|
assignee: []
|
||||||
|
created_date: '2026-03-07 00:00'
|
||||||
|
updated_date: '2026-03-07 00:00'
|
||||||
|
labels:
|
||||||
|
- dictionary
|
||||||
|
- config
|
||||||
|
references:
|
||||||
|
- /home/sudacode/projects/japanese/SubMiner/src/main/character-dictionary-runtime.ts
|
||||||
|
- /home/sudacode/projects/japanese/SubMiner/src/config/resolve/integrations.ts
|
||||||
|
- /home/sudacode/projects/japanese/SubMiner/src/config/definitions/defaults-integrations.ts
|
||||||
|
priority: medium
|
||||||
|
dependencies: []
|
||||||
|
---
|
||||||
|
|
||||||
|
## Description
|
||||||
|
|
||||||
|
<!-- SECTION:DESCRIPTION:BEGIN -->
|
||||||
|
Add per-section config for character dictionary collapsible glossary sections so Description, Character Information, and Voiced by can each default to open or closed independently. Default all sections closed.
|
||||||
|
<!-- SECTION:DESCRIPTION:END -->
|
||||||
|
|
||||||
|
## Acceptance Criteria
|
||||||
|
<!-- AC:BEGIN -->
|
||||||
|
- [x] #1 Config supports `anilist.characterDictionary.collapsibleSections.description`.
|
||||||
|
- [x] #2 Config supports `anilist.characterDictionary.collapsibleSections.characterInformation`.
|
||||||
|
- [x] #3 Config supports `anilist.characterDictionary.collapsibleSections.voicedBy`.
|
||||||
|
- [x] #4 Default config keeps all generated character dictionary collapsible sections closed.
|
||||||
|
- [x] #5 Regression coverage verifies config parsing/warnings and generated glossary `details.open` behavior.
|
||||||
|
<!-- AC:END -->
|
||||||
|
|
||||||
|
## Final Summary
|
||||||
|
|
||||||
|
<!-- SECTION:FINAL_SUMMARY:BEGIN -->
|
||||||
|
Added per-section open-state config under `anilist.characterDictionary.collapsibleSections` for `description`, `characterInformation`, and `voicedBy`, all defaulting to `false`. Wired the glossary generator to read those settings so generated `details.open` matches config, and added regression coverage for defaults, parsing/warnings, registry exposure, and runtime glossary output.
|
||||||
|
<!-- SECTION:FINAL_SUMMARY:END -->
|
||||||
@@ -5,6 +5,7 @@
|
|||||||
* Copy to $XDG_CONFIG_HOME/SubMiner/config.jsonc (or ~/.config/SubMiner/config.jsonc) and edit as needed.
|
* Copy to $XDG_CONFIG_HOME/SubMiner/config.jsonc (or ~/.config/SubMiner/config.jsonc) and edit as needed.
|
||||||
*/
|
*/
|
||||||
{
|
{
|
||||||
|
|
||||||
// ==========================================
|
// ==========================================
|
||||||
// Overlay Auto-Start
|
// Overlay Auto-Start
|
||||||
// When overlay connects to mpv, automatically show overlay and hide mpv subtitles.
|
// When overlay connects to mpv, automatically show overlay and hide mpv subtitles.
|
||||||
@@ -13,11 +14,12 @@
|
|||||||
|
|
||||||
// ==========================================
|
// ==========================================
|
||||||
// Texthooker Server
|
// Texthooker Server
|
||||||
// Control whether browser opens automatically for texthooker.
|
// Configure texthooker startup launch and browser opening behavior.
|
||||||
// ==========================================
|
// ==========================================
|
||||||
"texthooker": {
|
"texthooker": {
|
||||||
"openBrowser": true, // Open browser setting. Values: true | false
|
"launchAtStartup": true, // Launch texthooker server automatically when SubMiner starts. Values: true | false
|
||||||
}, // Control whether browser opens automatically for texthooker.
|
"openBrowser": true // Open browser setting. Values: true | false
|
||||||
|
}, // Configure texthooker startup launch and browser opening behavior.
|
||||||
|
|
||||||
// ==========================================
|
// ==========================================
|
||||||
// WebSocket Server
|
// WebSocket Server
|
||||||
@@ -26,17 +28,41 @@
|
|||||||
// ==========================================
|
// ==========================================
|
||||||
"websocket": {
|
"websocket": {
|
||||||
"enabled": "auto", // Built-in subtitle websocket server mode. Values: auto | true | false
|
"enabled": "auto", // Built-in subtitle websocket server mode. Values: auto | true | false
|
||||||
"port": 6677, // Built-in subtitle websocket server port.
|
"port": 6677 // Built-in subtitle websocket server port.
|
||||||
}, // Built-in WebSocket server broadcasts subtitle text to connected clients.
|
}, // Built-in WebSocket server broadcasts subtitle text to connected clients.
|
||||||
|
|
||||||
|
// ==========================================
|
||||||
|
// Annotation WebSocket
|
||||||
|
// Dedicated annotated subtitle websocket for bundled texthooker and token-aware clients.
|
||||||
|
// Independent from the websocket.enabled "auto" mode and defaults to port 6678.
|
||||||
|
// ==========================================
|
||||||
|
"annotationWebsocket": {
|
||||||
|
"enabled": true, // Annotated subtitle websocket server enabled state. Values: true | false
|
||||||
|
"port": 6678 // Annotated subtitle websocket server port.
|
||||||
|
}, // Dedicated annotated subtitle websocket for bundled texthooker and token-aware clients.
|
||||||
|
|
||||||
// ==========================================
|
// ==========================================
|
||||||
// Logging
|
// Logging
|
||||||
// Controls logging verbosity.
|
// Controls logging verbosity.
|
||||||
// Set to debug for full runtime diagnostics.
|
// Set to debug for full runtime diagnostics.
|
||||||
// ==========================================
|
// ==========================================
|
||||||
"logging": {
|
"logging": {
|
||||||
"level": "info", // Minimum log level for runtime logging. Values: debug | info | warn | error
|
"level": "info" // Minimum log level for runtime logging. Values: debug | info | warn | error
|
||||||
}, // Controls logging verbosity. Keep this as an object; do not replace with a bare string.
|
}, // Controls logging verbosity.
|
||||||
|
|
||||||
|
// ==========================================
|
||||||
|
// Startup Warmups
|
||||||
|
// Background warmup controls for MeCab, Yomitan, dictionaries, and Jellyfin session.
|
||||||
|
// Disable individual warmups to defer load until first real usage.
|
||||||
|
// lowPowerMode defers all warmups except Yomitan extension.
|
||||||
|
// ==========================================
|
||||||
|
"startupWarmups": {
|
||||||
|
"lowPowerMode": false, // Defer startup warmups except Yomitan extension. Values: true | false
|
||||||
|
"mecab": true, // Warm up MeCab tokenizer at startup. Values: true | false
|
||||||
|
"yomitanExtension": true, // Warm up Yomitan extension at startup. Values: true | false
|
||||||
|
"subtitleDictionaries": true, // Warm up subtitle dictionaries at startup. Values: true | false
|
||||||
|
"jellyfinRemoteSession": true // Warm up Jellyfin remote session at startup. Values: true | false
|
||||||
|
}, // Background warmup controls for MeCab, Yomitan, dictionaries, and Jellyfin session.
|
||||||
|
|
||||||
// ==========================================
|
// ==========================================
|
||||||
// Keyboard Shortcuts
|
// Keyboard Shortcuts
|
||||||
@@ -56,7 +82,7 @@
|
|||||||
"toggleSecondarySub": "CommandOrControl+Shift+V", // Toggle secondary sub setting.
|
"toggleSecondarySub": "CommandOrControl+Shift+V", // Toggle secondary sub setting.
|
||||||
"markAudioCard": "CommandOrControl+Shift+A", // Mark audio card setting.
|
"markAudioCard": "CommandOrControl+Shift+A", // Mark audio card setting.
|
||||||
"openRuntimeOptions": "CommandOrControl+Shift+O", // Open runtime options setting.
|
"openRuntimeOptions": "CommandOrControl+Shift+O", // Open runtime options setting.
|
||||||
"openJimaku": "Ctrl+Shift+J", // Open jimaku setting.
|
"openJimaku": "Ctrl+Shift+J" // Open jimaku setting.
|
||||||
}, // Overlay keyboard shortcuts. Set a shortcut to null to disable.
|
}, // Overlay keyboard shortcuts. Set a shortcut to null to disable.
|
||||||
|
|
||||||
// ==========================================
|
// ==========================================
|
||||||
@@ -76,7 +102,7 @@
|
|||||||
"secondarySub": {
|
"secondarySub": {
|
||||||
"secondarySubLanguages": [], // Secondary sub languages setting.
|
"secondarySubLanguages": [], // Secondary sub languages setting.
|
||||||
"autoLoadSecondarySub": false, // Auto load secondary sub setting. Values: true | false
|
"autoLoadSecondarySub": false, // Auto load secondary sub setting. Values: true | false
|
||||||
"defaultMode": "hover", // Default mode setting.
|
"defaultMode": "hover" // Default mode setting.
|
||||||
}, // Dual subtitle track options.
|
}, // Dual subtitle track options.
|
||||||
|
|
||||||
// ==========================================
|
// ==========================================
|
||||||
@@ -88,7 +114,7 @@
|
|||||||
"alass_path": "", // Alass path setting.
|
"alass_path": "", // Alass path setting.
|
||||||
"ffsubsync_path": "", // Ffsubsync path setting.
|
"ffsubsync_path": "", // Ffsubsync path setting.
|
||||||
"ffmpeg_path": "", // Ffmpeg path setting.
|
"ffmpeg_path": "", // Ffmpeg path setting.
|
||||||
"replace": true, // Replace active subtitle file when synchronization succeeds.
|
"replace": true // Replace the active subtitle file when synchronization succeeds. Values: true | false
|
||||||
}, // Subsync engine and executable paths.
|
}, // Subsync engine and executable paths.
|
||||||
|
|
||||||
// ==========================================
|
// ==========================================
|
||||||
@@ -96,7 +122,7 @@
|
|||||||
// Initial vertical subtitle position from the bottom.
|
// Initial vertical subtitle position from the bottom.
|
||||||
// ==========================================
|
// ==========================================
|
||||||
"subtitlePosition": {
|
"subtitlePosition": {
|
||||||
"yPercent": 10, // Y percent setting.
|
"yPercent": 10 // Y percent setting.
|
||||||
}, // Initial vertical subtitle position from the bottom.
|
}, // Initial vertical subtitle position from the bottom.
|
||||||
|
|
||||||
// ==========================================
|
// ==========================================
|
||||||
@@ -108,8 +134,11 @@
|
|||||||
"enableJlpt": false, // Enable JLPT vocabulary level underlines. When disabled, JLPT tagging lookup and underlines are skipped. Values: true | false
|
"enableJlpt": false, // Enable JLPT vocabulary level underlines. When disabled, JLPT tagging lookup and underlines are skipped. Values: true | false
|
||||||
"preserveLineBreaks": false, // Preserve line breaks in visible overlay subtitle rendering. When false, line breaks are flattened to spaces for a single-line flow. Values: true | false
|
"preserveLineBreaks": false, // Preserve line breaks in visible overlay subtitle rendering. When false, line breaks are flattened to spaces for a single-line flow. Values: true | false
|
||||||
"autoPauseVideoOnHover": true, // Automatically pause mpv playback while hovering subtitle text, then resume on leave. Values: true | false
|
"autoPauseVideoOnHover": true, // Automatically pause mpv playback while hovering subtitle text, then resume on leave. Values: true | false
|
||||||
|
"autoPauseVideoOnYomitanPopup": false, // Automatically pause mpv playback while Yomitan popup is open, then resume when popup closes. Values: true | false
|
||||||
"hoverTokenColor": "#f4dbd6", // Hex color used for hovered subtitle token highlight in mpv.
|
"hoverTokenColor": "#f4dbd6", // Hex color used for hovered subtitle token highlight in mpv.
|
||||||
"hoverTokenBackgroundColor": "rgba(54, 58, 79, 0.84)", // CSS color used for hovered subtitle token background highlight in mpv.
|
"hoverTokenBackgroundColor": "rgba(54, 58, 79, 0.84)", // CSS color used for hovered subtitle token background highlight in mpv.
|
||||||
|
"nameMatchEnabled": true, // Enable subtitle token coloring for matches from the SubMiner character dictionary. Values: true | false
|
||||||
|
"nameMatchColor": "#f5bde6", // Hex color used when a subtitle token matches an entry from the SubMiner character dictionary.
|
||||||
"fontFamily": "M PLUS 1 Medium, Source Han Sans JP, Noto Sans CJK JP", // Font family setting.
|
"fontFamily": "M PLUS 1 Medium, Source Han Sans JP, Noto Sans CJK JP", // Font family setting.
|
||||||
"fontSize": 35, // Font size setting.
|
"fontSize": 35, // Font size setting.
|
||||||
"fontColor": "#cad3f5", // Font color setting.
|
"fontColor": "#cad3f5", // Font color setting.
|
||||||
@@ -130,16 +159,22 @@
|
|||||||
"N2": "#f5a97f", // N2 setting.
|
"N2": "#f5a97f", // N2 setting.
|
||||||
"N3": "#f9e2af", // N3 setting.
|
"N3": "#f9e2af", // N3 setting.
|
||||||
"N4": "#a6e3a1", // N4 setting.
|
"N4": "#a6e3a1", // N4 setting.
|
||||||
"N5": "#8aadf4", // N5 setting.
|
"N5": "#8aadf4" // N5 setting.
|
||||||
}, // Jlpt colors setting.
|
}, // Jlpt colors setting.
|
||||||
"frequencyDictionary": {
|
"frequencyDictionary": {
|
||||||
"enabled": false, // Enable frequency-dictionary-based highlighting based on token rank. Values: true | false
|
"enabled": false, // Enable frequency-dictionary-based highlighting based on token rank. Values: true | false
|
||||||
"sourcePath": "", // Optional absolute path to a frequency dictionary directory. If empty, SubMiner searches installed/default frequency-dictionary locations.
|
"sourcePath": "", // Optional absolute path to a frequency dictionary directory. If empty, built-in discovery search paths are used.
|
||||||
"topX": 1000, // Only color tokens with frequency rank <= topX (default: 1000).
|
"topX": 1000, // Only color tokens with frequency rank <= topX (default: 1000).
|
||||||
"mode": "single", // single: use one color for all matching tokens. banded: use color ramp by frequency band. Values: single | banded
|
"mode": "single", // single: use one color for all matching tokens. banded: use color ramp by frequency band. Values: single | banded
|
||||||
"matchMode": "headword", // Frequency lookup text selection mode. Values: headword | surface
|
"matchMode": "headword", // headword: frequency lookup uses dictionary form. surface: lookup uses subtitle-visible token text. Values: headword | surface
|
||||||
"singleColor": "#f5a97f", // Color used when frequencyDictionary.mode is `single`.
|
"singleColor": "#f5a97f", // Color used when frequencyDictionary.mode is `single`.
|
||||||
"bandedColors": ["#ed8796", "#f5a97f", "#f9e2af", "#8bd5ca", "#8aadf4"], // Five colors used for rank bands when mode is `banded` (from most common to least within topX).
|
"bandedColors": [
|
||||||
|
"#ed8796",
|
||||||
|
"#f5a97f",
|
||||||
|
"#f9e2af",
|
||||||
|
"#8bd5ca",
|
||||||
|
"#8aadf4"
|
||||||
|
] // Five colors used for rank bands when mode is `banded` (from most common to least within topX).
|
||||||
}, // Frequency dictionary setting.
|
}, // Frequency dictionary setting.
|
||||||
"secondary": {
|
"secondary": {
|
||||||
"fontFamily": "Inter, Noto Sans, Helvetica Neue, sans-serif", // Font family setting.
|
"fontFamily": "Inter, Noto Sans, Helvetica Neue, sans-serif", // Font family setting.
|
||||||
@@ -154,8 +189,8 @@
|
|||||||
"backgroundColor": "transparent", // Background color setting.
|
"backgroundColor": "transparent", // Background color setting.
|
||||||
"backdropFilter": "blur(6px)", // Backdrop filter setting.
|
"backdropFilter": "blur(6px)", // Backdrop filter setting.
|
||||||
"fontWeight": "normal", // Font weight setting.
|
"fontWeight": "normal", // Font weight setting.
|
||||||
"fontStyle": "normal", // Font style setting.
|
"fontStyle": "normal" // Font style setting.
|
||||||
}, // Secondary setting.
|
} // Secondary setting.
|
||||||
}, // Primary and secondary subtitle styling.
|
}, // Primary and secondary subtitle styling.
|
||||||
|
|
||||||
// ==========================================
|
// ==========================================
|
||||||
@@ -169,18 +204,20 @@
|
|||||||
"url": "http://127.0.0.1:8765", // Url setting.
|
"url": "http://127.0.0.1:8765", // Url setting.
|
||||||
"pollingRate": 3000, // Polling interval in milliseconds.
|
"pollingRate": 3000, // Polling interval in milliseconds.
|
||||||
"proxy": {
|
"proxy": {
|
||||||
"enabled": false, // Enable local AnkiConnect-compatible proxy for push-based auto-enrichment. Values: true | false
|
"enabled": true, // Enable local AnkiConnect-compatible proxy for push-based auto-enrichment. Values: true | false
|
||||||
"host": "127.0.0.1", // Bind host for local AnkiConnect proxy.
|
"host": "127.0.0.1", // Bind host for local AnkiConnect proxy.
|
||||||
"port": 8766, // Bind port for local AnkiConnect proxy.
|
"port": 8766, // Bind port for local AnkiConnect proxy.
|
||||||
"upstreamUrl": "http://127.0.0.1:8765", // Upstream AnkiConnect URL proxied by local AnkiConnect proxy.
|
"upstreamUrl": "http://127.0.0.1:8765" // Upstream AnkiConnect URL proxied by local AnkiConnect proxy.
|
||||||
}, // Proxy setting.
|
}, // Proxy setting.
|
||||||
"tags": ["SubMiner"], // Tags to add to cards mined or updated by SubMiner. Provide an empty array to disable automatic tagging.
|
"tags": [
|
||||||
|
"SubMiner"
|
||||||
|
], // Tags to add to cards mined or updated by SubMiner. Provide an empty array to disable automatic tagging.
|
||||||
"fields": {
|
"fields": {
|
||||||
"audio": "ExpressionAudio", // Audio setting.
|
"audio": "ExpressionAudio", // Audio setting.
|
||||||
"image": "Picture", // Image setting.
|
"image": "Picture", // Image setting.
|
||||||
"sentence": "Sentence", // Sentence setting.
|
"sentence": "Sentence", // Sentence setting.
|
||||||
"miscInfo": "MiscInfo", // Misc info setting.
|
"miscInfo": "MiscInfo", // Misc info setting.
|
||||||
"translation": "SelectionText", // Translation setting.
|
"translation": "SelectionText" // Translation setting.
|
||||||
}, // Fields setting.
|
}, // Fields setting.
|
||||||
"ai": {
|
"ai": {
|
||||||
"enabled": false, // Enabled setting. Values: true | false
|
"enabled": false, // Enabled setting. Values: true | false
|
||||||
@@ -189,7 +226,7 @@
|
|||||||
"model": "openai/gpt-4o-mini", // Model setting.
|
"model": "openai/gpt-4o-mini", // Model setting.
|
||||||
"baseUrl": "https://openrouter.ai/api", // Base url setting.
|
"baseUrl": "https://openrouter.ai/api", // Base url setting.
|
||||||
"targetLanguage": "English", // Target language setting.
|
"targetLanguage": "English", // Target language setting.
|
||||||
"systemPrompt": "You are a translation engine. Return only the translated text with no explanations.", // System prompt setting.
|
"systemPrompt": "You are a translation engine. Return only the translated text with no explanations." // System prompt setting.
|
||||||
}, // Ai setting.
|
}, // Ai setting.
|
||||||
"media": {
|
"media": {
|
||||||
"generateAudio": true, // Generate audio setting. Values: true | false
|
"generateAudio": true, // Generate audio setting. Values: true | false
|
||||||
@@ -202,7 +239,7 @@
|
|||||||
"animatedCrf": 35, // Animated crf setting.
|
"animatedCrf": 35, // Animated crf setting.
|
||||||
"audioPadding": 0.5, // Audio padding setting.
|
"audioPadding": 0.5, // Audio padding setting.
|
||||||
"fallbackDuration": 3, // Fallback duration setting.
|
"fallbackDuration": 3, // Fallback duration setting.
|
||||||
"maxMediaDuration": 30, // Max media duration setting.
|
"maxMediaDuration": 30 // Max media duration setting.
|
||||||
}, // Media setting.
|
}, // Media setting.
|
||||||
"behavior": {
|
"behavior": {
|
||||||
"overwriteAudio": true, // Overwrite audio setting. Values: true | false
|
"overwriteAudio": true, // Overwrite audio setting. Values: true | false
|
||||||
@@ -210,7 +247,7 @@
|
|||||||
"mediaInsertMode": "append", // Media insert mode setting.
|
"mediaInsertMode": "append", // Media insert mode setting.
|
||||||
"highlightWord": true, // Highlight word setting. Values: true | false
|
"highlightWord": true, // Highlight word setting. Values: true | false
|
||||||
"notificationType": "osd", // Notification type setting.
|
"notificationType": "osd", // Notification type setting.
|
||||||
"autoUpdateNewCards": true, // Automatically update newly added cards. Values: true | false
|
"autoUpdateNewCards": true // Automatically update newly added cards. Values: true | false
|
||||||
}, // Behavior setting.
|
}, // Behavior setting.
|
||||||
"nPlusOne": {
|
"nPlusOne": {
|
||||||
"highlightEnabled": false, // Enable fast local highlighting for words already known in Anki. Values: true | false
|
"highlightEnabled": false, // Enable fast local highlighting for words already known in Anki. Values: true | false
|
||||||
@@ -219,20 +256,20 @@
|
|||||||
"decks": [], // Decks used for N+1 known-word cache scope. Supports one or more deck names.
|
"decks": [], // Decks used for N+1 known-word cache scope. Supports one or more deck names.
|
||||||
"minSentenceWords": 3, // Minimum sentence word count required for N+1 targeting (default: 3).
|
"minSentenceWords": 3, // Minimum sentence word count required for N+1 targeting (default: 3).
|
||||||
"nPlusOne": "#c6a0f6", // Color used for the single N+1 target token highlight.
|
"nPlusOne": "#c6a0f6", // Color used for the single N+1 target token highlight.
|
||||||
"knownWord": "#a6da95", // Color used for legacy known-word highlights.
|
"knownWord": "#a6da95" // Color used for legacy known-word highlights.
|
||||||
}, // N plus one setting.
|
}, // N plus one setting.
|
||||||
"metadata": {
|
"metadata": {
|
||||||
"pattern": "[SubMiner] %f (%t)", // Pattern setting.
|
"pattern": "[SubMiner] %f (%t)" // Pattern setting.
|
||||||
}, // Metadata setting.
|
}, // Metadata setting.
|
||||||
"isLapis": {
|
"isLapis": {
|
||||||
"enabled": false, // Enabled setting. Values: true | false
|
"enabled": false, // Enabled setting. Values: true | false
|
||||||
"sentenceCardModel": "Japanese sentences", // Sentence card model setting.
|
"sentenceCardModel": "Japanese sentences" // Sentence card model setting.
|
||||||
}, // Is lapis setting.
|
}, // Is lapis setting.
|
||||||
"isKiku": {
|
"isKiku": {
|
||||||
"enabled": false, // Enabled setting. Values: true | false
|
"enabled": false, // Enabled setting. Values: true | false
|
||||||
"fieldGrouping": "disabled", // Kiku duplicate-card field grouping mode. Values: auto | manual | disabled
|
"fieldGrouping": "disabled", // Kiku duplicate-card field grouping mode. Values: auto | manual | disabled
|
||||||
"deleteDuplicateInAuto": true, // Delete duplicate in auto setting. Values: true | false
|
"deleteDuplicateInAuto": true // Delete duplicate in auto setting. Values: true | false
|
||||||
}, // Is kiku setting.
|
} // Is kiku setting.
|
||||||
}, // Automatic Anki updates and media generation options.
|
}, // Automatic Anki updates and media generation options.
|
||||||
|
|
||||||
// ==========================================
|
// ==========================================
|
||||||
@@ -242,7 +279,7 @@
|
|||||||
"jimaku": {
|
"jimaku": {
|
||||||
"apiBaseUrl": "https://jimaku.cc", // Api base url setting.
|
"apiBaseUrl": "https://jimaku.cc", // Api base url setting.
|
||||||
"languagePreference": "ja", // Preferred language used in Jimaku search. Values: ja | en | none
|
"languagePreference": "ja", // Preferred language used in Jimaku search. Values: ja | en | none
|
||||||
"maxEntryResults": 10, // Maximum Jimaku search results returned.
|
"maxEntryResults": 10 // Maximum Jimaku search results returned.
|
||||||
}, // Jimaku API configuration and defaults.
|
}, // Jimaku API configuration and defaults.
|
||||||
|
|
||||||
// ==========================================
|
// ==========================================
|
||||||
@@ -253,16 +290,33 @@
|
|||||||
"mode": "automatic", // YouTube subtitle generation mode for the launcher script. Values: automatic | preprocess | off
|
"mode": "automatic", // YouTube subtitle generation mode for the launcher script. Values: automatic | preprocess | off
|
||||||
"whisperBin": "", // Path to whisper.cpp CLI used as fallback transcription engine.
|
"whisperBin": "", // Path to whisper.cpp CLI used as fallback transcription engine.
|
||||||
"whisperModel": "", // Path to whisper model used for fallback transcription.
|
"whisperModel": "", // Path to whisper model used for fallback transcription.
|
||||||
"primarySubLanguages": ["ja", "jpn"], // Comma-separated primary subtitle language priority used by the launcher.
|
"primarySubLanguages": [
|
||||||
|
"ja",
|
||||||
|
"jpn"
|
||||||
|
] // Primary subtitle language priority list used by the launcher (ordered most- to least-preferred).
|
||||||
}, // Defaults for subminer YouTube subtitle extraction/transcription mode.
|
}, // Defaults for subminer YouTube subtitle extraction/transcription mode.
|
||||||
|
|
||||||
// ==========================================
|
// ==========================================
|
||||||
// Anilist
|
// Anilist
|
||||||
// Anilist API credentials and update behavior.
|
// Anilist API credentials and update behavior.
|
||||||
|
// Includes optional auto-sync for a merged MRU-based character dictionary in bundled Yomitan.
|
||||||
|
// Character dictionaries are keyed by AniList media ID (no season/franchise merge).
|
||||||
// ==========================================
|
// ==========================================
|
||||||
"anilist": {
|
"anilist": {
|
||||||
"enabled": false, // Enable AniList post-watch progress updates. Values: true | false
|
"enabled": false, // Enable AniList post-watch progress updates. Values: true | false
|
||||||
"accessToken": "", // Optional explicit AniList access token override; leave empty to use locally stored token from setup.
|
"accessToken": "", // Optional explicit AniList access token override; leave empty to use locally stored token from setup.
|
||||||
|
"characterDictionary": {
|
||||||
|
"enabled": false, // Enable automatic Yomitan character dictionary sync for currently watched AniList media. Values: true | false
|
||||||
|
"refreshTtlHours": 168, // Legacy setting; merged character dictionary retention is now usage-based and this value is ignored.
|
||||||
|
"maxLoaded": 3, // Maximum number of most-recently-used anime snapshots included in the merged Yomitan character dictionary.
|
||||||
|
"evictionPolicy": "delete", // Legacy setting; merged character dictionary eviction is usage-based and this value is ignored. Values: disable | delete
|
||||||
|
"profileScope": "all", // Yomitan profile scope for dictionary enable/disable updates. Values: all | active
|
||||||
|
"collapsibleSections": {
|
||||||
|
"description": false, // Open the Description section by default in character dictionary glossary entries. Values: true | false
|
||||||
|
"characterInformation": false, // Open the Character Information section by default in character dictionary glossary entries. Values: true | false
|
||||||
|
"voicedBy": false // Open the Voiced by section by default in character dictionary glossary entries. Values: true | false
|
||||||
|
} // Collapsible sections setting.
|
||||||
|
} // Character dictionary setting.
|
||||||
}, // Anilist API credentials and update behavior.
|
}, // Anilist API credentials and update behavior.
|
||||||
|
|
||||||
// ==========================================
|
// ==========================================
|
||||||
@@ -286,8 +340,16 @@
|
|||||||
"pullPictures": false, // Enable Jellyfin poster/icon fetching for launcher menus. Values: true | false
|
"pullPictures": false, // Enable Jellyfin poster/icon fetching for launcher menus. Values: true | false
|
||||||
"iconCacheDir": "/tmp/subminer-jellyfin-icons", // Directory used by launcher for cached Jellyfin poster icons.
|
"iconCacheDir": "/tmp/subminer-jellyfin-icons", // Directory used by launcher for cached Jellyfin poster icons.
|
||||||
"directPlayPreferred": true, // Try direct play before server-managed transcoding when possible. Values: true | false
|
"directPlayPreferred": true, // Try direct play before server-managed transcoding when possible. Values: true | false
|
||||||
"directPlayContainers": ["mkv", "mp4", "webm", "mov", "flac", "mp3", "aac"], // Container allowlist for direct play decisions.
|
"directPlayContainers": [
|
||||||
"transcodeVideoCodec": "h264", // Preferred transcode video codec when direct play is unavailable.
|
"mkv",
|
||||||
|
"mp4",
|
||||||
|
"webm",
|
||||||
|
"mov",
|
||||||
|
"flac",
|
||||||
|
"mp3",
|
||||||
|
"aac"
|
||||||
|
], // Container allowlist for direct play decisions.
|
||||||
|
"transcodeVideoCodec": "h264" // Preferred transcode video codec when direct play is unavailable.
|
||||||
}, // Optional Jellyfin integration for auth, browsing, and playback launch.
|
}, // Optional Jellyfin integration for auth, browsing, and playback launch.
|
||||||
|
|
||||||
// ==========================================
|
// ==========================================
|
||||||
@@ -298,7 +360,7 @@
|
|||||||
"discordPresence": {
|
"discordPresence": {
|
||||||
"enabled": false, // Enable optional Discord Rich Presence updates. Values: true | false
|
"enabled": false, // Enable optional Discord Rich Presence updates. Values: true | false
|
||||||
"updateIntervalMs": 3000, // Minimum interval between presence payload updates.
|
"updateIntervalMs": 3000, // Minimum interval between presence payload updates.
|
||||||
"debounceMs": 750, // Debounce delay used to collapse bursty presence updates.
|
"debounceMs": 750 // Debounce delay used to collapse bursty presence updates.
|
||||||
}, // Optional Discord Rich Presence activity card updates for current playback/study session.
|
}, // Optional Discord Rich Presence activity card updates for current playback/study session.
|
||||||
|
|
||||||
// ==========================================
|
// ==========================================
|
||||||
@@ -320,7 +382,7 @@
|
|||||||
"telemetryDays": 30, // Telemetry retention window in days.
|
"telemetryDays": 30, // Telemetry retention window in days.
|
||||||
"dailyRollupsDays": 365, // Daily rollup retention window in days.
|
"dailyRollupsDays": 365, // Daily rollup retention window in days.
|
||||||
"monthlyRollupsDays": 1825, // Monthly rollup retention window in days.
|
"monthlyRollupsDays": 1825, // Monthly rollup retention window in days.
|
||||||
"vacuumIntervalDays": 7, // Minimum days between VACUUM runs.
|
"vacuumIntervalDays": 7 // Minimum days between VACUUM runs.
|
||||||
}, // Retention setting.
|
} // Retention setting.
|
||||||
}, // Enable/disable immersion tracking.
|
} // Enable/disable immersion tracking.
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -1,28 +0,0 @@
|
|||||||
# Anki Integration
|
|
||||||
|
|
||||||
read_when:
|
|
||||||
- changing `src/anki-integration.ts`
|
|
||||||
- changing Anki transport/config hot-reload behavior
|
|
||||||
- tracing note update, field grouping, or proxy ownership
|
|
||||||
|
|
||||||
## Ownership
|
|
||||||
|
|
||||||
- `src/anki-integration.ts`: thin facade; wires dependencies; exposes public Anki API used by runtime/services.
|
|
||||||
- `src/anki-integration/runtime.ts`: normalized config state, polling-vs-proxy transport lifecycle, runtime config patch handling.
|
|
||||||
- `src/anki-integration/card-creation.ts`: sentence/audio card creation and clipboard update flow.
|
|
||||||
- `src/anki-integration/note-update-workflow.ts`: enrich newly added notes.
|
|
||||||
- `src/anki-integration/field-grouping.ts`: preview/build helpers for Kiku field grouping.
|
|
||||||
- `src/anki-integration/field-grouping-workflow.ts`: auto/manual merge execution.
|
|
||||||
- `src/anki-integration/anki-connect-proxy.ts`: local proxy transport for post-add enrichment.
|
|
||||||
- `src/anki-integration/known-word-cache.ts`: known-word cache lifecycle and persistence.
|
|
||||||
|
|
||||||
## Refactor seam
|
|
||||||
|
|
||||||
`AnkiIntegrationRuntime` owns the cluster that previously mixed:
|
|
||||||
|
|
||||||
- config normalization/defaulting
|
|
||||||
- polling vs proxy startup/shutdown
|
|
||||||
- transport restart decisions during runtime patches
|
|
||||||
- known-word cache lifecycle toggles tied to config changes
|
|
||||||
|
|
||||||
Keep new orchestration work in `runtime.ts` when it changes process-level Anki state. Keep note/card behavior in the workflow/service modules.
|
|
||||||
@@ -1,50 +0,0 @@
|
|||||||
# Character Name Gating Implementation Plan
|
|
||||||
|
|
||||||
> **For Claude:** REQUIRED SUB-SKILL: Use superpowers:executing-plans to implement this plan task-by-task.
|
|
||||||
|
|
||||||
**Goal:** Disable subtitle character-name lookup/highlighting when the AniList character dictionary feature is disabled, while keeping tokenization and all other annotations working.
|
|
||||||
|
|
||||||
**Architecture:** Gate `getNameMatchEnabled` at the runtime-deps boundary used by subtitle tokenization. Keep the tokenizer pipeline intact and only suppress character-name metadata requests when `anilist.characterDictionary.enabled` is false, regardless of `subtitleStyle.nameMatchEnabled`.
|
|
||||||
|
|
||||||
**Tech Stack:** TypeScript, Bun test runner, Electron main/runtime wiring.
|
|
||||||
|
|
||||||
---
|
|
||||||
|
|
||||||
### Task 1: Add runtime gating coverage
|
|
||||||
|
|
||||||
**Files:**
|
|
||||||
- Modify: `src/main/runtime/subtitle-tokenization-main-deps.test.ts`
|
|
||||||
|
|
||||||
**Step 1: Write the failing test**
|
|
||||||
|
|
||||||
Add a test proving the built `getNameMatchEnabled()` resolves to `false` when `getCharacterDictionaryEnabled()` is `false`, even if the underlying `subtitleStyle.nameMatchEnabled` setting is `true`.
|
|
||||||
|
|
||||||
**Step 2: Run test to verify it fails**
|
|
||||||
|
|
||||||
Run: `bun test src/main/runtime/subtitle-tokenization-main-deps.test.ts`
|
|
||||||
Expected: FAIL because the deps builder does not yet combine the two flags.
|
|
||||||
|
|
||||||
### Task 2: Implement minimal runtime gate
|
|
||||||
|
|
||||||
**Files:**
|
|
||||||
- Modify: `src/main/runtime/subtitle-tokenization-main-deps.ts`
|
|
||||||
- Modify: `src/main.ts`
|
|
||||||
|
|
||||||
**Step 3: Write minimal implementation**
|
|
||||||
|
|
||||||
Add `getCharacterDictionaryEnabled` to the main handler deps and make the built `getNameMatchEnabled` return true only when both the subtitle setting and the character dictionary setting are enabled.
|
|
||||||
|
|
||||||
**Step 4: Run tests to verify green**
|
|
||||||
|
|
||||||
Run: `bun test src/main/runtime/subtitle-tokenization-main-deps.test.ts`
|
|
||||||
Expected: PASS.
|
|
||||||
|
|
||||||
### Task 3: Verify no regressions in related tokenization seams
|
|
||||||
|
|
||||||
**Files:**
|
|
||||||
- Modify: none unless failures reveal drift
|
|
||||||
|
|
||||||
**Step 5: Run focused verification**
|
|
||||||
|
|
||||||
Run: `bun test src/core/services/subtitle-processing-controller.test.ts src/main/runtime/subtitle-tokenization-main-deps.test.ts`
|
|
||||||
Expected: PASS.
|
|
||||||
@@ -1,155 +0,0 @@
|
|||||||
# Immersion SQLite Verification Implementation Plan
|
|
||||||
|
|
||||||
> **For Claude:** REQUIRED SUB-SKILL: Use superpowers:executing-plans to implement this plan task-by-task.
|
|
||||||
|
|
||||||
**Goal:** Make the SQLite-backed immersion tracking persistence tests visible in the repo's verification surface and reproducible through at least one documented automated command.
|
|
||||||
|
|
||||||
**Architecture:** Keep the existing Bun fast lane intact for routine local verification, but add an explicit SQLite verification lane that runs the database-backed immersion tests under a runtime with `node:sqlite` support. Surface unsupported-runtime behavior clearly in the source tests and contributor docs so skipped or omitted coverage is no longer mistaken for a fully green persistence lane.
|
|
||||||
|
|
||||||
**Tech Stack:** TypeScript, Bun scripts in `package.json`, Node's built-in `node:test` and `node:sqlite`, GitHub Actions workflows, Markdown docs in `README.md`.
|
|
||||||
|
|
||||||
---
|
|
||||||
|
|
||||||
### Task 1: Audit and expose the SQLite-backed immersion test surface
|
|
||||||
|
|
||||||
**Files:**
|
|
||||||
|
|
||||||
- Modify: `src/core/services/immersion-tracker-service.test.ts`
|
|
||||||
- Modify: `src/core/services/immersion-tracker/storage-session.test.ts`
|
|
||||||
- Reference: `src/main/runtime/registry.test.ts`
|
|
||||||
|
|
||||||
**Step 1: Write the failing test**
|
|
||||||
|
|
||||||
Refactor the SQLite-gated immersion tests so missing `node:sqlite` support is reported with an explicit skip reason instead of a silent top-level `test.skip` alias.
|
|
||||||
|
|
||||||
**Step 2: Run test to verify it fails**
|
|
||||||
|
|
||||||
Run: `bun test src/core/services/immersion-tracker-service.test.ts src/core/services/immersion-tracker/storage-session.test.ts`
|
|
||||||
Expected: the current output shows generic skips or hides the storage-session suite from normal scripted verification, which is too opaque for contributors.
|
|
||||||
|
|
||||||
**Step 3: Write minimal implementation**
|
|
||||||
|
|
||||||
Mirror the `src/main/runtime/registry.test.ts` pattern: add a helper that either loads `DatabaseSync` or skips with a message like `requires node:sqlite support in this runtime`, then wrap each SQLite-backed test through that helper.
|
|
||||||
|
|
||||||
**Step 4: Run test to verify it passes**
|
|
||||||
|
|
||||||
Run: `bun test src/core/services/immersion-tracker-service.test.ts src/core/services/immersion-tracker/storage-session.test.ts`
|
|
||||||
Expected: PASS, with explicit skip messages in unsupported runtimes.
|
|
||||||
|
|
||||||
### Task 2: Add a reproducible SQLite verification command
|
|
||||||
|
|
||||||
**Files:**
|
|
||||||
|
|
||||||
- Modify: `package.json`
|
|
||||||
- Reference: `src/core/services/immersion-tracker-service.test.ts`
|
|
||||||
- Reference: `src/core/services/immersion-tracker/storage-session.test.ts`
|
|
||||||
|
|
||||||
**Step 1: Write the failing test**
|
|
||||||
|
|
||||||
Add a dedicated script contract for the SQLite-backed immersion verification lane so both persistence-heavy suites are intentionally grouped and runnable together.
|
|
||||||
|
|
||||||
**Step 2: Run test to verify it fails**
|
|
||||||
|
|
||||||
Run: `bun run test:immersion:sqlite`
|
|
||||||
Expected: FAIL because no such reproducible lane exists yet.
|
|
||||||
|
|
||||||
**Step 3: Write minimal implementation**
|
|
||||||
|
|
||||||
Update `package.json` with explicit scripts for the SQLite lane. Prefer a command shape that actually executes the built JS tests under Node with `node:sqlite` support, for example:
|
|
||||||
|
|
||||||
- `test:immersion:sqlite:dist`: `node --test dist/core/services/immersion-tracker-service.test.js dist/core/services/immersion-tracker/storage-session.test.js`
|
|
||||||
- `test:immersion:sqlite`: `bun run build && bun run test:immersion:sqlite:dist`
|
|
||||||
|
|
||||||
If build cost or runtime behavior requires a small adjustment, keep the core contract the same: one documented command must run both SQLite-backed immersion suites end-to-end.
|
|
||||||
|
|
||||||
**Step 4: Run test to verify it passes**
|
|
||||||
|
|
||||||
Run: `bun run test:immersion:sqlite`
|
|
||||||
Expected: PASS in a Node runtime with `node:sqlite`, executing both persistence suites without Bun-only skips.
|
|
||||||
|
|
||||||
### Task 3: Wire the SQLite lane into automated verification
|
|
||||||
|
|
||||||
**Files:**
|
|
||||||
|
|
||||||
- Modify: `.github/workflows/ci.yml`
|
|
||||||
- Modify: `.github/workflows/release.yml`
|
|
||||||
- Reference: `package.json`
|
|
||||||
|
|
||||||
**Step 1: Write the failing test**
|
|
||||||
|
|
||||||
Add the new SQLite immersion lane to the repo's automated verification so contributors and CI can rely on a real persistence check rather than the Bun fast lane alone.
|
|
||||||
|
|
||||||
**Step 2: Run test to verify it fails**
|
|
||||||
|
|
||||||
Run: `bun run test:immersion:sqlite`
|
|
||||||
Expected: local command may pass, but CI/release workflows still omit the lane entirely.
|
|
||||||
|
|
||||||
**Step 3: Write minimal implementation**
|
|
||||||
|
|
||||||
Update both workflows to provision a Node version with `node:sqlite` support before the SQLite lane runs, then execute `bun run test:immersion:sqlite` in the quality gate after the bundle build produces `dist/**` test files.
|
|
||||||
|
|
||||||
**Step 4: Run test to verify it passes**
|
|
||||||
|
|
||||||
Run: `bun run test:immersion:sqlite`
|
|
||||||
Expected: PASS locally, and workflow definitions clearly show the SQLite lane as part of automated verification.
|
|
||||||
|
|
||||||
### Task 4: Document contributor-facing prerequisites and commands
|
|
||||||
|
|
||||||
**Files:**
|
|
||||||
|
|
||||||
- Modify: `README.md`
|
|
||||||
- Reference: `package.json`
|
|
||||||
- Reference: `.github/workflows/ci.yml`
|
|
||||||
|
|
||||||
**Step 1: Write the failing test**
|
|
||||||
|
|
||||||
Extend the verification docs so contributors can discover the SQLite lane, know why the Bun source lane may skip those cases, and understand which command reproduces the persistence coverage.
|
|
||||||
|
|
||||||
**Step 2: Run test to verify it fails**
|
|
||||||
|
|
||||||
Run: `grep -n "test:immersion:sqlite" README.md`
|
|
||||||
Expected: FAIL because the dedicated immersion SQLite lane is undocumented.
|
|
||||||
|
|
||||||
**Step 3: Write minimal implementation**
|
|
||||||
|
|
||||||
Update `README.md` to document:
|
|
||||||
|
|
||||||
- the Bun fast/default lane versus the SQLite persistence lane
|
|
||||||
- the `node:sqlite` prerequisite for the reproducible command
|
|
||||||
- that the dedicated lane covers session persistence/finalization behavior beyond seam tests
|
|
||||||
|
|
||||||
**Step 4: Run test to verify it passes**
|
|
||||||
|
|
||||||
Run: `grep -n "test:immersion:sqlite" README.md && grep -n "node:sqlite" README.md`
|
|
||||||
Expected: PASS, with clear contributor guidance.
|
|
||||||
|
|
||||||
### Task 5: Verify persistence coverage end-to-end
|
|
||||||
|
|
||||||
**Files:**
|
|
||||||
|
|
||||||
- Test: `src/core/services/immersion-tracker-service.test.ts`
|
|
||||||
- Test: `src/core/services/immersion-tracker/storage-session.test.ts`
|
|
||||||
- Reference: `README.md`
|
|
||||||
- Reference: `package.json`
|
|
||||||
|
|
||||||
**Step 1: Write the failing test**
|
|
||||||
|
|
||||||
Prove the final lane exercises real DB-backed persistence/finalization paths, not just the seam tests.
|
|
||||||
|
|
||||||
**Step 2: Run test to verify it fails**
|
|
||||||
|
|
||||||
Run: `bun run test:immersion:sqlite`
|
|
||||||
Expected: before implementation, the command does not exist or does not cover both SQLite-backed suites.
|
|
||||||
|
|
||||||
**Step 3: Write minimal implementation**
|
|
||||||
|
|
||||||
Keep the dedicated lane pointed at both existing SQLite-backed test files so it covers representative finalization and persistence behavior such as:
|
|
||||||
|
|
||||||
- `destroy finalizes active session and persists final telemetry`
|
|
||||||
- `start/finalize session updates ended_at and status`
|
|
||||||
- `executeQueuedWrite inserts event and telemetry rows`
|
|
||||||
|
|
||||||
**Step 4: Run test to verify it passes**
|
|
||||||
|
|
||||||
Run: `bun run test:immersion:sqlite`
|
|
||||||
Expected: PASS, with those DB-backed persistence/finalization cases executing successfully under Node.
|
|
||||||
@@ -1,92 +0,0 @@
|
|||||||
# Merged Character Dictionary Implementation Plan
|
|
||||||
|
|
||||||
> **For Claude:** REQUIRED SUB-SKILL: Use superpowers:executing-plans to implement this plan task-by-task.
|
|
||||||
|
|
||||||
**Goal:** Replace per-anime character dictionary imports with one merged Yomitan dictionary driven by MRU usage retention.
|
|
||||||
|
|
||||||
**Architecture:** Persist normalized per-media character dictionary snapshots locally, maintain MRU retained media ids in auto-sync state, and rebuild a single merged Yomitan zip only when the retained set changes. Keep external AniList fetches only for media without a local snapshot; normal revisits stay local.
|
|
||||||
|
|
||||||
**Tech Stack:** TypeScript, Bun test, Node fs/path, existing Yomitan zip generation helpers.
|
|
||||||
|
|
||||||
---
|
|
||||||
|
|
||||||
### Task 1: Lock in merged auto-sync behavior
|
|
||||||
|
|
||||||
**Files:**
|
|
||||||
- Modify: `src/main/runtime/character-dictionary-auto-sync.test.ts`
|
|
||||||
- Test: `src/main/runtime/character-dictionary-auto-sync.test.ts`
|
|
||||||
|
|
||||||
**Step 1: Write the failing test**
|
|
||||||
|
|
||||||
Add tests for:
|
|
||||||
- single merged dictionary title/import replacing per-media imports
|
|
||||||
- MRU reorder causing rebuild only when order changes
|
|
||||||
- unchanged revisit skipping rebuild/import
|
|
||||||
- capped retained set evicting least-recently-used media
|
|
||||||
|
|
||||||
**Step 2: Run test to verify it fails**
|
|
||||||
|
|
||||||
Run: `bun test src/main/runtime/character-dictionary-auto-sync.test.ts`
|
|
||||||
Expected: FAIL on old per-media import assumptions / missing merged behavior
|
|
||||||
|
|
||||||
**Step 3: Write minimal implementation**
|
|
||||||
|
|
||||||
Update auto-sync runtime to track retained media ids and merged revision/hash, call merged zip builder, and replace one imported Yomitan dictionary.
|
|
||||||
|
|
||||||
**Step 4: Run test to verify it passes**
|
|
||||||
|
|
||||||
Run: `bun test src/main/runtime/character-dictionary-auto-sync.test.ts`
|
|
||||||
Expected: PASS
|
|
||||||
|
|
||||||
### Task 2: Add snapshot + merged-zip runtime support
|
|
||||||
|
|
||||||
**Files:**
|
|
||||||
- Modify: `src/main/character-dictionary-runtime.ts`
|
|
||||||
- Modify: `src/main/character-dictionary-runtime.test.ts`
|
|
||||||
- Test: `src/main/character-dictionary-runtime.test.ts`
|
|
||||||
|
|
||||||
**Step 1: Write the failing test**
|
|
||||||
|
|
||||||
Add tests for:
|
|
||||||
- saving/loading normalized per-media snapshots without per-media zip cache
|
|
||||||
- building merged zip from retained media snapshots with stable dictionary title
|
|
||||||
- preserving images/terms from multiple media in merged output
|
|
||||||
|
|
||||||
**Step 2: Run test to verify it fails**
|
|
||||||
|
|
||||||
Run: `bun test src/main/character-dictionary-runtime.test.ts`
|
|
||||||
Expected: FAIL because snapshot/merged APIs do not exist yet
|
|
||||||
|
|
||||||
**Step 3: Write minimal implementation**
|
|
||||||
|
|
||||||
Refactor dictionary runtime to expose snapshot generation/loading and merged zip building from stored metadata/images.
|
|
||||||
|
|
||||||
**Step 4: Run test to verify it passes**
|
|
||||||
|
|
||||||
Run: `bun test src/main/character-dictionary-runtime.test.ts`
|
|
||||||
Expected: PASS
|
|
||||||
|
|
||||||
### Task 3: Wire app/runtime config and docs
|
|
||||||
|
|
||||||
**Files:**
|
|
||||||
- Modify: `src/main.ts`
|
|
||||||
- Modify: `src/config/definitions/options-integrations.ts`
|
|
||||||
- Modify: `README.md`
|
|
||||||
|
|
||||||
**Step 1: Write the failing test**
|
|
||||||
|
|
||||||
Add or update tests if needed for new dependency wiring / docs-adjacent config description expectations.
|
|
||||||
|
|
||||||
**Step 2: Run test to verify it fails**
|
|
||||||
|
|
||||||
Run: `bun test src/main/runtime/character-dictionary-auto-sync.test.ts src/main/character-dictionary-runtime.test.ts`
|
|
||||||
Expected: FAIL until wiring matches merged flow
|
|
||||||
|
|
||||||
**Step 3: Write minimal implementation**
|
|
||||||
|
|
||||||
Swap app wiring to new snapshot + merged build API, update config/docs text from TTL semantics to usage-based merged retention.
|
|
||||||
|
|
||||||
**Step 4: Run test to verify it passes**
|
|
||||||
|
|
||||||
Run: `bun test src/main/runtime/character-dictionary-auto-sync.test.ts src/main/character-dictionary-runtime.test.ts && bun run tsc --noEmit`
|
|
||||||
Expected: PASS
|
|
||||||
@@ -1,121 +0,0 @@
|
|||||||
# Subtitle Sync Verification Implementation Plan
|
|
||||||
|
|
||||||
> **For Claude:** REQUIRED SUB-SKILL: Use superpowers:executing-plans to implement this plan task-by-task.
|
|
||||||
|
|
||||||
**Goal:** Replace the no-op `test:subtitle` lane with real automated subtitle-sync verification that reuses the maintained subsync tests and documents the real contributor workflow.
|
|
||||||
|
|
||||||
**Architecture:** Repoint the subtitle verification command at the existing source-level subsync tests instead of inventing a second hidden suite. Add one focused ffsubsync failure-path test so the subtitle lane explicitly covers both engines plus a non-happy path, then update contributor docs to describe the dedicated subtitle lane and how it relates to `test:core`.
|
|
||||||
|
|
||||||
**Tech Stack:** TypeScript, Bun test, Node test/assert, npm package scripts, Markdown docs.
|
|
||||||
|
|
||||||
---
|
|
||||||
|
|
||||||
### Task 1: Lock subtitle lane to real subsync tests
|
|
||||||
|
|
||||||
**Files:**
|
|
||||||
|
|
||||||
- Modify: `package.json`
|
|
||||||
|
|
||||||
**Step 1: Write the failing test**
|
|
||||||
|
|
||||||
Define the intended command shape first: `test:subtitle:src` should run `src/core/services/subsync.test.ts` and `src/subsync/utils.test.ts`, `test:subtitle` should invoke that real source lane, and no placeholder echo should remain.
|
|
||||||
|
|
||||||
**Step 2: Run test to verify it fails**
|
|
||||||
|
|
||||||
Run: `bun run test:subtitle`
|
|
||||||
Expected: It performs a build and prints `Subtitle tests are currently not configured`, proving the lane is still a no-op.
|
|
||||||
|
|
||||||
**Step 3: Write minimal implementation**
|
|
||||||
|
|
||||||
Update `package.json` so:
|
|
||||||
|
|
||||||
- `test:subtitle:src` runs `bun test src/core/services/subsync.test.ts src/subsync/utils.test.ts`
|
|
||||||
- `test:subtitle` runs the new source lane directly
|
|
||||||
- `test:subtitle:dist` is removed if it is no longer the real verification path
|
|
||||||
|
|
||||||
**Step 4: Run test to verify it passes**
|
|
||||||
|
|
||||||
Run: `bun run test:subtitle`
|
|
||||||
Expected: PASS with Bun executing the real subtitle-sync test files.
|
|
||||||
|
|
||||||
### Task 2: Add explicit ffsubsync non-happy-path coverage
|
|
||||||
|
|
||||||
**Files:**
|
|
||||||
|
|
||||||
- Modify: `src/core/services/subsync.test.ts`
|
|
||||||
- Test: `src/core/services/subsync.test.ts`
|
|
||||||
|
|
||||||
**Step 1: Write the failing test**
|
|
||||||
|
|
||||||
Add a test that runs `runSubsyncManual({ engine: 'ffsubsync' })` with a stub ffsubsync executable that exits non-zero and writes stderr, then assert:
|
|
||||||
|
|
||||||
- `result.ok === false`
|
|
||||||
- `result.message` starts with `ffsubsync synchronization failed`
|
|
||||||
- the failure message includes command details surfaced to the user
|
|
||||||
|
|
||||||
**Step 2: Run test to verify it fails**
|
|
||||||
|
|
||||||
Run: `bun test src/core/services/subsync.test.ts`
|
|
||||||
Expected: FAIL because ffsubsync failure propagation is not asserted yet.
|
|
||||||
|
|
||||||
**Step 3: Write minimal implementation**
|
|
||||||
|
|
||||||
Keep production code unchanged unless the new test exposes a real bug. If needed, tighten failure assertions or message propagation in `src/core/services/subsync.ts` without changing successful behavior.
|
|
||||||
|
|
||||||
**Step 4: Run test to verify it passes**
|
|
||||||
|
|
||||||
Run: `bun test src/core/services/subsync.test.ts`
|
|
||||||
Expected: PASS with both alass and ffsubsync paths covered, including a non-happy path.
|
|
||||||
|
|
||||||
### Task 3: Make contributor docs match the real verification path
|
|
||||||
|
|
||||||
**Files:**
|
|
||||||
|
|
||||||
- Modify: `README.md`
|
|
||||||
- Modify: `package.json`
|
|
||||||
|
|
||||||
**Step 1: Write the failing test**
|
|
||||||
|
|
||||||
Use the repository state as the failure signal: README currently advertises subtitle sync as a feature but does not tell contributors that `bun run test:subtitle` is the real verification lane.
|
|
||||||
|
|
||||||
**Step 2: Run test to verify it fails**
|
|
||||||
|
|
||||||
Run: `bun run test:subtitle && bun test src/subsync/utils.test.ts`
|
|
||||||
Expected: Tests pass, but docs still do not explain the lane; this is the remaining acceptance-criteria gap.
|
|
||||||
|
|
||||||
**Step 3: Write minimal implementation**
|
|
||||||
|
|
||||||
Update `README.md` with a short contributor-facing verification note that:
|
|
||||||
|
|
||||||
- points to `bun run test:subtitle` for subtitle-sync coverage
|
|
||||||
- states that the lane reuses the maintained subsync tests already included in broader core coverage
|
|
||||||
- avoids implying there is a separate hidden subtitle test harness beyond those tests
|
|
||||||
|
|
||||||
**Step 4: Run test to verify it passes**
|
|
||||||
|
|
||||||
Run: `bun run test:subtitle`
|
|
||||||
Expected: PASS, with docs and scripts now aligned around the same subtitle verification strategy.
|
|
||||||
|
|
||||||
### Task 4: Verify matrix integration stays clean
|
|
||||||
|
|
||||||
**Files:**
|
|
||||||
|
|
||||||
- Modify: `package.json` (only if Task 1/3 exposed cleanup needs)
|
|
||||||
|
|
||||||
**Step 1: Write the failing test**
|
|
||||||
|
|
||||||
Treat duplication as the failure condition: confirm the dedicated subtitle lane reuses the same maintained files already present in `test:core:src` rather than creating a second divergent suite.
|
|
||||||
|
|
||||||
**Step 2: Run test to verify it fails**
|
|
||||||
|
|
||||||
Run: `bun run test:subtitle && bun run test:core:src`
|
|
||||||
Expected: If file lists diverge unexpectedly, this review step exposes it before handoff.
|
|
||||||
|
|
||||||
**Step 3: Write minimal implementation**
|
|
||||||
|
|
||||||
If needed, do the smallest script cleanup necessary so subtitle coverage remains explicit without hiding or duplicating existing core coverage.
|
|
||||||
|
|
||||||
**Step 4: Run test to verify it passes**
|
|
||||||
|
|
||||||
Run: `bun run test:subtitle && bun run test:core:src`
|
|
||||||
Expected: PASS, confirming the dedicated lane and the broader core suite agree on subtitle coverage.
|
|
||||||
@@ -1,169 +0,0 @@
|
|||||||
# Testing Workflow Test Matrix Implementation Plan
|
|
||||||
|
|
||||||
> **For Claude:** REQUIRED SUB-SKILL: Use superpowers:executing-plans to implement this plan task-by-task.
|
|
||||||
|
|
||||||
**Goal:** Make the standard test commands reflect the maintained test surface so newly added tests are discovered automatically or intentionally documented outside the default lane.
|
|
||||||
|
|
||||||
**Architecture:** Replace the current hand-maintained file allowlists in `package.json` with directory-based Bun test lanes that map to maintained test surfaces. Keep the default developer lane fast, move slower or environment-specific checks into explicit commands, and document the resulting matrix in `README.md` so contributors know exactly which command to run.
|
|
||||||
|
|
||||||
**Tech Stack:** TypeScript, Bun test, npm-style package scripts in `package.json`, Markdown docs in `README.md`.
|
|
||||||
|
|
||||||
---
|
|
||||||
|
|
||||||
### Task 1: Lock in the desired script matrix with failing tests/audit checks
|
|
||||||
|
|
||||||
**Files:**
|
|
||||||
|
|
||||||
- Modify: `package.json`
|
|
||||||
- Test: `package.json`
|
|
||||||
- Reference: `src/main-entry-runtime.test.ts`
|
|
||||||
- Reference: `src/anki-integration/anki-connect-proxy.test.ts`
|
|
||||||
- Reference: `src/main/runtime/registry.test.ts`
|
|
||||||
|
|
||||||
**Step 1: Write the failing test**
|
|
||||||
|
|
||||||
Define the desired script structure in `package.json` by editing the script map so these lanes exist:
|
|
||||||
|
|
||||||
- `test:fast` for default fast verification
|
|
||||||
- `test:full` for the maintained source test surface
|
|
||||||
- `test:env` for environment-specific checks
|
|
||||||
|
|
||||||
The fast lane should stay selective and intentional. The full lane should use directory-based discovery rather than file-by-file allowlists, with representative coverage from:
|
|
||||||
|
|
||||||
- `src/main-entry-runtime.test.ts`
|
|
||||||
- `src/anki-integration/**/*.test.ts`
|
|
||||||
- `src/main/**/*.test.ts`
|
|
||||||
- `launcher/**/*.test.ts`
|
|
||||||
|
|
||||||
**Step 2: Run test to verify it fails**
|
|
||||||
|
|
||||||
Run: `bun run test:full`
|
|
||||||
Expected: FAIL because `test:full` does not exist yet, and previously omitted maintained tests are still outside the standard matrix.
|
|
||||||
|
|
||||||
**Step 3: Write minimal implementation**
|
|
||||||
|
|
||||||
Update `package.json` scripts so:
|
|
||||||
|
|
||||||
- `test` points at `test:fast`
|
|
||||||
- `test:fast` runs the fast default lane only
|
|
||||||
- `test:full` runs directory-based maintained suites instead of file allowlists
|
|
||||||
- `test:env` runs environment-specific verification (for example launcher/plugin and sqlite-gated suites)
|
|
||||||
- subsystem scripts use stable path globs or directory arguments so new tests are discovered automatically
|
|
||||||
|
|
||||||
Prefer commands like these, adjusted only as needed for Bun behavior in this repo:
|
|
||||||
|
|
||||||
- `bun test src/config/**/*.test.ts`
|
|
||||||
- `bun test src/{cli,core,renderer,subtitle,subsync,main,anki-integration}/*.test.ts ...` only if Bun cannot take the broader directory directly
|
|
||||||
- `bun test launcher/**/*.test.ts`
|
|
||||||
|
|
||||||
Do not keep large hand-maintained file enumerations for maintained unit/integration lanes.
|
|
||||||
|
|
||||||
**Step 4: Run test to verify it passes**
|
|
||||||
|
|
||||||
Run: `bun run test:full`
|
|
||||||
Expected: PASS, including automated execution of representative tests that were previously omitted from the standard matrix.
|
|
||||||
|
|
||||||
### Task 2: Separate environment-specific verification from the maintained default/full lanes
|
|
||||||
|
|
||||||
**Files:**
|
|
||||||
|
|
||||||
- Modify: `package.json`
|
|
||||||
- Test: `src/main/runtime/registry.test.ts`
|
|
||||||
- Test: `launcher/smoke.e2e.test.ts`
|
|
||||||
- Test: `src/core/services/immersion-tracker-service.test.ts`
|
|
||||||
|
|
||||||
**Step 1: Write the failing test**
|
|
||||||
|
|
||||||
Refine the package scripts so environment-specific checks are explicitly grouped outside the default fast lane. Treat these as the primary environment-specific examples unless repo behavior proves a better split during execution:
|
|
||||||
|
|
||||||
- launcher smoke/plugin checks that rely on local process or Lua execution
|
|
||||||
- sqlite-dependent checks that may skip when `node:sqlite` is unavailable
|
|
||||||
|
|
||||||
**Step 2: Run test to verify it fails**
|
|
||||||
|
|
||||||
Run: `bun run test:env`
|
|
||||||
Expected: FAIL because the environment-specific lane is not defined yet.
|
|
||||||
|
|
||||||
**Step 3: Write minimal implementation**
|
|
||||||
|
|
||||||
Add explicit environment-specific scripts in `package.json`, such as:
|
|
||||||
|
|
||||||
- a launcher/plugin lane that runs `launcher/smoke.e2e.test.ts` plus `lua scripts/test-plugin-start-gate.lua`
|
|
||||||
- a sqlite lane for tests that require `node:sqlite` support or otherwise need environment notes
|
|
||||||
- an aggregate `test:env` command that runs all environment-specific lanes
|
|
||||||
|
|
||||||
Keep these lanes documented and reproducible rather than silently excluded.
|
|
||||||
|
|
||||||
**Step 4: Run test to verify it passes**
|
|
||||||
|
|
||||||
Run: `bun run test:env`
|
|
||||||
Expected: PASS in supported environments, or clear documented skip behavior where the tests themselves intentionally gate on missing runtime support.
|
|
||||||
|
|
||||||
### Task 3: Document contributor-facing test commands and matrix
|
|
||||||
|
|
||||||
**Files:**
|
|
||||||
|
|
||||||
- Modify: `README.md`
|
|
||||||
- Reference: `package.json`
|
|
||||||
|
|
||||||
**Step 1: Write the failing test**
|
|
||||||
|
|
||||||
Define the requirements for a contributor-focused testing section in `README.md`:
|
|
||||||
|
|
||||||
- fast verification command
|
|
||||||
- full verification command
|
|
||||||
- environment-specific verification command
|
|
||||||
- plain-language explanation of which suites each lane covers and why
|
|
||||||
|
|
||||||
**Step 2: Run test to verify it fails**
|
|
||||||
|
|
||||||
Run: `grep -n "Testing" README.md`
|
|
||||||
Expected: no contributor testing matrix section exists yet.
|
|
||||||
|
|
||||||
**Step 3: Write minimal implementation**
|
|
||||||
|
|
||||||
Update `README.md` with a concise `Testing` section that documents:
|
|
||||||
|
|
||||||
- `bun run test` / `bun run test:fast` for fast local verification
|
|
||||||
- `bun run test:full` for the maintained source test surface
|
|
||||||
- `bun run test:env` for environment-specific verification
|
|
||||||
- any important notes about sqlite-gated tests and launcher/plugin checks
|
|
||||||
|
|
||||||
Keep the matrix concrete and reproducible.
|
|
||||||
|
|
||||||
**Step 4: Run test to verify it passes**
|
|
||||||
|
|
||||||
Run: `grep -n "Testing" README.md && grep -n "test:full" README.md && grep -n "test:env" README.md`
|
|
||||||
Expected: PASS with the new contributor-facing matrix present.
|
|
||||||
|
|
||||||
### Task 4: Verify representative omitted suites now belong to automated lanes
|
|
||||||
|
|
||||||
**Files:**
|
|
||||||
|
|
||||||
- Test: `src/main-entry-runtime.test.ts`
|
|
||||||
- Test: `src/anki-integration/anki-connect-proxy.test.ts`
|
|
||||||
- Test: `src/main/runtime/registry.test.ts`
|
|
||||||
- Reference: `package.json`
|
|
||||||
- Reference: `README.md`
|
|
||||||
|
|
||||||
**Step 1: Write the failing test**
|
|
||||||
|
|
||||||
Use targeted command checks to prove these previously omitted surfaces are now in the matrix:
|
|
||||||
|
|
||||||
- entry/runtime: `src/main-entry-runtime.test.ts`
|
|
||||||
- Anki integration: `src/anki-integration/anki-connect-proxy.test.ts`
|
|
||||||
- main runtime: `src/main/runtime/registry.test.ts`
|
|
||||||
|
|
||||||
**Step 2: Run test to verify it fails**
|
|
||||||
|
|
||||||
Run: `bun run test:full src/main-entry-runtime.test.ts`
|
|
||||||
Expected: either unsupported invocation or evidence that the current matrix still does not include these surfaces automatically.
|
|
||||||
|
|
||||||
**Step 3: Write minimal implementation**
|
|
||||||
|
|
||||||
Adjust the final script paths/globs until the full matrix includes those representative surfaces without file-by-file script maintenance.
|
|
||||||
|
|
||||||
**Step 4: Run test to verify it passes**
|
|
||||||
|
|
||||||
Run: `bun test src/main-entry-runtime.test.ts src/anki-integration/anki-connect-proxy.test.ts src/main/runtime/registry.test.ts && bun run test:fast && bun run test:full`
|
|
||||||
Expected: PASS, with at least one representative test from each required surface executing through the documented automated lanes.
|
|
||||||
@@ -1,4 +1,6 @@
|
|||||||
import fs from 'node:fs';
|
import fs from 'node:fs';
|
||||||
|
import os from 'node:os';
|
||||||
|
import { spawn } from 'node:child_process';
|
||||||
import { fail, log } from '../log.js';
|
import { fail, log } from '../log.js';
|
||||||
import { commandExists, isYoutubeTarget, realpathMaybe, resolvePathMaybe } from '../util.js';
|
import { commandExists, isYoutubeTarget, realpathMaybe, resolvePathMaybe } from '../util.js';
|
||||||
import { collectVideos, showFzfMenu, showRofiMenu } from '../picker.js';
|
import { collectVideos, showFzfMenu, showRofiMenu } from '../picker.js';
|
||||||
@@ -13,6 +15,15 @@ import {
|
|||||||
import { generateYoutubeSubtitles } from '../youtube.js';
|
import { generateYoutubeSubtitles } from '../youtube.js';
|
||||||
import type { Args } from '../types.js';
|
import type { Args } from '../types.js';
|
||||||
import type { LauncherCommandContext } from './context.js';
|
import type { LauncherCommandContext } from './context.js';
|
||||||
|
import { ensureLauncherSetupReady } from '../setup-gate.js';
|
||||||
|
import {
|
||||||
|
getDefaultConfigDir,
|
||||||
|
getSetupStatePath,
|
||||||
|
readSetupState,
|
||||||
|
} from '../../src/shared/setup-state.js';
|
||||||
|
|
||||||
|
const SETUP_WAIT_TIMEOUT_MS = 10 * 60 * 1000;
|
||||||
|
const SETUP_POLL_INTERVAL_MS = 500;
|
||||||
|
|
||||||
function checkDependencies(args: Args): void {
|
function checkDependencies(args: Args): void {
|
||||||
const missing: string[] = [];
|
const missing: string[] = [];
|
||||||
@@ -84,12 +95,47 @@ function registerCleanup(context: LauncherCommandContext): void {
|
|||||||
});
|
});
|
||||||
}
|
}
|
||||||
|
|
||||||
|
async function ensurePlaybackSetupReady(context: LauncherCommandContext): Promise<void> {
|
||||||
|
const { args, appPath } = context;
|
||||||
|
if (!appPath) return;
|
||||||
|
|
||||||
|
const configDir = getDefaultConfigDir({
|
||||||
|
xdgConfigHome: process.env.XDG_CONFIG_HOME,
|
||||||
|
homeDir: os.homedir(),
|
||||||
|
});
|
||||||
|
const statePath = getSetupStatePath(configDir);
|
||||||
|
const ready = await ensureLauncherSetupReady({
|
||||||
|
readSetupState: () => readSetupState(statePath),
|
||||||
|
launchSetupApp: () => {
|
||||||
|
const setupArgs = ['--background', '--setup'];
|
||||||
|
if (args.logLevel) {
|
||||||
|
setupArgs.push('--log-level', args.logLevel);
|
||||||
|
}
|
||||||
|
const child = spawn(appPath, setupArgs, {
|
||||||
|
detached: true,
|
||||||
|
stdio: 'ignore',
|
||||||
|
});
|
||||||
|
child.unref();
|
||||||
|
},
|
||||||
|
sleep: (ms) => new Promise((resolve) => setTimeout(resolve, ms)),
|
||||||
|
now: () => Date.now(),
|
||||||
|
timeoutMs: SETUP_WAIT_TIMEOUT_MS,
|
||||||
|
pollIntervalMs: SETUP_POLL_INTERVAL_MS,
|
||||||
|
});
|
||||||
|
|
||||||
|
if (!ready) {
|
||||||
|
fail('SubMiner setup is incomplete. Complete setup in the app, then retry playback.');
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
export async function runPlaybackCommand(context: LauncherCommandContext): Promise<void> {
|
export async function runPlaybackCommand(context: LauncherCommandContext): Promise<void> {
|
||||||
const { args, appPath, scriptPath, mpvSocketPath, pluginRuntimeConfig, processAdapter } = context;
|
const { args, appPath, scriptPath, mpvSocketPath, pluginRuntimeConfig, processAdapter } = context;
|
||||||
if (!appPath) {
|
if (!appPath) {
|
||||||
fail('SubMiner AppImage not found. Install to ~/.local/bin/ or set SUBMINER_APPIMAGE_PATH.');
|
fail('SubMiner AppImage not found. Install to ~/.local/bin/ or set SUBMINER_APPIMAGE_PATH.');
|
||||||
}
|
}
|
||||||
|
|
||||||
|
await ensurePlaybackSetupReady(context);
|
||||||
|
|
||||||
if (!args.target) {
|
if (!args.target) {
|
||||||
checkPickerDependencies(args);
|
checkPickerDependencies(args);
|
||||||
}
|
}
|
||||||
|
|||||||
107
launcher/setup-gate.test.ts
Normal file
107
launcher/setup-gate.test.ts
Normal file
@@ -0,0 +1,107 @@
|
|||||||
|
import test from 'node:test';
|
||||||
|
import assert from 'node:assert/strict';
|
||||||
|
import { ensureLauncherSetupReady, waitForSetupCompletion } from './setup-gate';
|
||||||
|
import type { SetupState } from '../src/shared/setup-state';
|
||||||
|
|
||||||
|
test('waitForSetupCompletion resolves completed and cancelled states', async () => {
|
||||||
|
const sequence: Array<SetupState | null> = [
|
||||||
|
null,
|
||||||
|
{
|
||||||
|
version: 1,
|
||||||
|
status: 'in_progress',
|
||||||
|
completedAt: null,
|
||||||
|
completionSource: null,
|
||||||
|
lastSeenYomitanDictionaryCount: 0,
|
||||||
|
pluginInstallStatus: 'unknown',
|
||||||
|
pluginInstallPathSummary: null,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
version: 1,
|
||||||
|
status: 'completed',
|
||||||
|
completedAt: '2026-03-07T00:00:00.000Z',
|
||||||
|
completionSource: 'user',
|
||||||
|
lastSeenYomitanDictionaryCount: 1,
|
||||||
|
pluginInstallStatus: 'skipped',
|
||||||
|
pluginInstallPathSummary: null,
|
||||||
|
},
|
||||||
|
];
|
||||||
|
|
||||||
|
const result = await waitForSetupCompletion({
|
||||||
|
readSetupState: () => sequence.shift() ?? null,
|
||||||
|
sleep: async () => undefined,
|
||||||
|
now: (() => {
|
||||||
|
let value = 0;
|
||||||
|
return () => (value += 100);
|
||||||
|
})(),
|
||||||
|
timeoutMs: 5_000,
|
||||||
|
pollIntervalMs: 100,
|
||||||
|
});
|
||||||
|
|
||||||
|
assert.equal(result, 'completed');
|
||||||
|
});
|
||||||
|
|
||||||
|
test('ensureLauncherSetupReady launches setup app and resumes only after completion', async () => {
|
||||||
|
const calls: string[] = [];
|
||||||
|
let reads = 0;
|
||||||
|
|
||||||
|
const ready = await ensureLauncherSetupReady({
|
||||||
|
readSetupState: () => {
|
||||||
|
reads += 1;
|
||||||
|
if (reads === 1) return null;
|
||||||
|
if (reads === 2) {
|
||||||
|
return {
|
||||||
|
version: 1,
|
||||||
|
status: 'in_progress',
|
||||||
|
completedAt: null,
|
||||||
|
completionSource: null,
|
||||||
|
lastSeenYomitanDictionaryCount: 0,
|
||||||
|
pluginInstallStatus: 'unknown',
|
||||||
|
pluginInstallPathSummary: null,
|
||||||
|
};
|
||||||
|
}
|
||||||
|
return {
|
||||||
|
version: 1,
|
||||||
|
status: 'completed',
|
||||||
|
completedAt: '2026-03-07T00:00:00.000Z',
|
||||||
|
completionSource: 'user',
|
||||||
|
lastSeenYomitanDictionaryCount: 1,
|
||||||
|
pluginInstallStatus: 'installed',
|
||||||
|
pluginInstallPathSummary: '/tmp/mpv',
|
||||||
|
};
|
||||||
|
},
|
||||||
|
launchSetupApp: () => {
|
||||||
|
calls.push('launch');
|
||||||
|
},
|
||||||
|
sleep: async () => undefined,
|
||||||
|
now: (() => {
|
||||||
|
let value = 0;
|
||||||
|
return () => (value += 100);
|
||||||
|
})(),
|
||||||
|
timeoutMs: 5_000,
|
||||||
|
pollIntervalMs: 100,
|
||||||
|
});
|
||||||
|
|
||||||
|
assert.equal(ready, true);
|
||||||
|
assert.deepEqual(calls, ['launch']);
|
||||||
|
});
|
||||||
|
|
||||||
|
test('ensureLauncherSetupReady fails on timeout/cancelled state', async () => {
|
||||||
|
const result = await ensureLauncherSetupReady({
|
||||||
|
readSetupState: () => ({
|
||||||
|
version: 1,
|
||||||
|
status: 'cancelled',
|
||||||
|
completedAt: null,
|
||||||
|
completionSource: null,
|
||||||
|
lastSeenYomitanDictionaryCount: 0,
|
||||||
|
pluginInstallStatus: 'unknown',
|
||||||
|
pluginInstallPathSummary: null,
|
||||||
|
}),
|
||||||
|
launchSetupApp: () => undefined,
|
||||||
|
sleep: async () => undefined,
|
||||||
|
now: () => 0,
|
||||||
|
timeoutMs: 5_000,
|
||||||
|
pollIntervalMs: 100,
|
||||||
|
});
|
||||||
|
|
||||||
|
assert.equal(result, false);
|
||||||
|
});
|
||||||
41
launcher/setup-gate.ts
Normal file
41
launcher/setup-gate.ts
Normal file
@@ -0,0 +1,41 @@
|
|||||||
|
import { isSetupCompleted, type SetupState } from '../src/shared/setup-state.js';
|
||||||
|
|
||||||
|
export async function waitForSetupCompletion(deps: {
|
||||||
|
readSetupState: () => SetupState | null;
|
||||||
|
sleep: (ms: number) => Promise<void>;
|
||||||
|
now: () => number;
|
||||||
|
timeoutMs: number;
|
||||||
|
pollIntervalMs: number;
|
||||||
|
}): Promise<'completed' | 'cancelled' | 'timeout'> {
|
||||||
|
const deadline = deps.now() + deps.timeoutMs;
|
||||||
|
|
||||||
|
while (deps.now() <= deadline) {
|
||||||
|
const state = deps.readSetupState();
|
||||||
|
if (isSetupCompleted(state)) {
|
||||||
|
return 'completed';
|
||||||
|
}
|
||||||
|
if (state?.status === 'cancelled') {
|
||||||
|
return 'cancelled';
|
||||||
|
}
|
||||||
|
await deps.sleep(deps.pollIntervalMs);
|
||||||
|
}
|
||||||
|
|
||||||
|
return 'timeout';
|
||||||
|
}
|
||||||
|
|
||||||
|
export async function ensureLauncherSetupReady(deps: {
|
||||||
|
readSetupState: () => SetupState | null;
|
||||||
|
launchSetupApp: () => void;
|
||||||
|
sleep: (ms: number) => Promise<void>;
|
||||||
|
now: () => number;
|
||||||
|
timeoutMs: number;
|
||||||
|
pollIntervalMs: number;
|
||||||
|
}): Promise<boolean> {
|
||||||
|
if (isSetupCompleted(deps.readSetupState())) {
|
||||||
|
return true;
|
||||||
|
}
|
||||||
|
|
||||||
|
deps.launchSetupApp();
|
||||||
|
const result = await waitForSetupCompletion(deps);
|
||||||
|
return result === 'completed';
|
||||||
|
}
|
||||||
@@ -4,6 +4,13 @@ import fs from 'node:fs';
|
|||||||
import os from 'node:os';
|
import os from 'node:os';
|
||||||
import path from 'node:path';
|
import path from 'node:path';
|
||||||
import { spawn, spawnSync } from 'node:child_process';
|
import { spawn, spawnSync } from 'node:child_process';
|
||||||
|
import {
|
||||||
|
createDefaultSetupState,
|
||||||
|
getDefaultConfigDir,
|
||||||
|
getSetupStatePath,
|
||||||
|
readSetupState,
|
||||||
|
writeSetupState,
|
||||||
|
} from '../src/shared/setup-state.js';
|
||||||
|
|
||||||
type RunResult = {
|
type RunResult = {
|
||||||
status: number | null;
|
status: number | null;
|
||||||
@@ -58,6 +65,13 @@ function createSmokeCase(name: string): SmokeCase {
|
|||||||
`socket_path=${socketPath}\n`,
|
`socket_path=${socketPath}\n`,
|
||||||
);
|
);
|
||||||
|
|
||||||
|
const configDir = getDefaultConfigDir({ xdgConfigHome, homeDir });
|
||||||
|
const setupState = createDefaultSetupState();
|
||||||
|
setupState.status = 'completed';
|
||||||
|
setupState.completedAt = '2026-03-07T00:00:00.000Z';
|
||||||
|
setupState.completionSource = 'user';
|
||||||
|
writeSetupState(getSetupStatePath(configDir), setupState);
|
||||||
|
|
||||||
const fakeMpvLogPath = path.join(artifactsDir, 'fake-mpv.log');
|
const fakeMpvLogPath = path.join(artifactsDir, 'fake-mpv.log');
|
||||||
const fakeAppLogPath = path.join(artifactsDir, 'fake-app.log');
|
const fakeAppLogPath = path.join(artifactsDir, 'fake-app.log');
|
||||||
const fakeAppStartLogPath = path.join(artifactsDir, 'fake-app-start.log');
|
const fakeAppStartLogPath = path.join(artifactsDir, 'fake-app-start.log');
|
||||||
@@ -224,6 +238,22 @@ async function waitForJsonLines(
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
test('launcher smoke fixture seeds completed setup state', () => {
|
||||||
|
const smokeCase = createSmokeCase('setup-state');
|
||||||
|
try {
|
||||||
|
const configDir = getDefaultConfigDir({
|
||||||
|
xdgConfigHome: smokeCase.xdgConfigHome,
|
||||||
|
homeDir: smokeCase.homeDir,
|
||||||
|
});
|
||||||
|
const statePath = getSetupStatePath(configDir);
|
||||||
|
|
||||||
|
assert.equal(readSetupState(statePath)?.status, 'completed');
|
||||||
|
} finally {
|
||||||
|
fs.rmSync(smokeCase.root, { recursive: true, force: true });
|
||||||
|
fs.rmSync(smokeCase.socketDir, { recursive: true, force: true });
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
test('launcher mpv status returns ready when socket is connectable', async () => {
|
test('launcher mpv status returns ready when socket is connectable', async () => {
|
||||||
await withSmokeCase('mpv-status', async (smokeCase) => {
|
await withSmokeCase('mpv-status', async (smokeCase) => {
|
||||||
const env = makeTestEnv(smokeCase);
|
const env = makeTestEnv(smokeCase);
|
||||||
|
|||||||
29
package.json
29
package.json
@@ -1,6 +1,6 @@
|
|||||||
{
|
{
|
||||||
"name": "subminer",
|
"name": "subminer",
|
||||||
"version": "0.3.0",
|
"version": "0.4.0",
|
||||||
"description": "All-in-one sentence mining overlay with AnkiConnect and dictionary integration",
|
"description": "All-in-one sentence mining overlay with AnkiConnect and dictionary integration",
|
||||||
"packageManager": "bun@1.3.5",
|
"packageManager": "bun@1.3.5",
|
||||||
"main": "dist/main-entry.js",
|
"main": "dist/main-entry.js",
|
||||||
@@ -8,21 +8,24 @@
|
|||||||
"typecheck": "tsc --noEmit -p tsconfig.typecheck.json",
|
"typecheck": "tsc --noEmit -p tsconfig.typecheck.json",
|
||||||
"typecheck:watch": "tsc --watch --preserveWatchOutput -p tsconfig.typecheck.json",
|
"typecheck:watch": "tsc --watch --preserveWatchOutput -p tsconfig.typecheck.json",
|
||||||
"get-frequency": "bun run scripts/get_frequency.ts --pretty --color-top-x 10000 --yomitan-user-data ~/.config/SubMiner --colorized-line",
|
"get-frequency": "bun run scripts/get_frequency.ts --pretty --color-top-x 10000 --yomitan-user-data ~/.config/SubMiner --colorized-line",
|
||||||
"get-frequency:electron": "bun build scripts/get_frequency.ts --format=cjs --target=node --outfile dist/scripts/get_frequency.js --external electron && electron dist/scripts/get_frequency.js --pretty --color-top-x 10000 --yomitan-user-data ~/.config/SubMiner --colorized-line",
|
"get-frequency:electron": "bun run build:yomitan && bun build scripts/get_frequency.ts --format=cjs --target=node --outfile dist/scripts/get_frequency.js --external electron && electron dist/scripts/get_frequency.js --pretty --color-top-x 10000 --yomitan-user-data ~/.config/SubMiner --colorized-line",
|
||||||
"test-yomitan-parser": "bun run scripts/test-yomitan-parser.ts",
|
"test-yomitan-parser": "bun run scripts/test-yomitan-parser.ts",
|
||||||
"test-yomitan-parser:electron": "bun build scripts/test-yomitan-parser.ts --format=cjs --target=node --outfile dist/scripts/test-yomitan-parser.js --external electron && electron dist/scripts/test-yomitan-parser.js",
|
"test-yomitan-parser:electron": "bun run build:yomitan && bun build scripts/test-yomitan-parser.ts --format=cjs --target=node --outfile dist/scripts/test-yomitan-parser.js --external electron && electron dist/scripts/test-yomitan-parser.js",
|
||||||
"build": "tsc -p tsconfig.json && bun run build:renderer && cp src/renderer/index.html src/renderer/style.css dist/renderer/ && cp -r src/renderer/fonts dist/renderer/ && bash scripts/build-macos-helper.sh",
|
"build:yomitan": "node scripts/build-yomitan.mjs",
|
||||||
|
"build": "bun run build:yomitan && tsc -p tsconfig.json && bun run build:renderer && cp src/renderer/index.html src/renderer/style.css dist/renderer/ && cp -r src/renderer/fonts dist/renderer/ && bash scripts/build-macos-helper.sh",
|
||||||
"build:renderer": "esbuild src/renderer/renderer.ts --bundle --platform=browser --format=esm --target=es2022 --outfile=dist/renderer/renderer.js --sourcemap",
|
"build:renderer": "esbuild src/renderer/renderer.ts --bundle --platform=browser --format=esm --target=es2022 --outfile=dist/renderer/renderer.js --sourcemap",
|
||||||
"format": "prettier --write .",
|
"format": "prettier --write .",
|
||||||
"format:check": "prettier --check .",
|
"format:check": "prettier --check .",
|
||||||
"test:config:src": "bun test src/config/config.test.ts src/config/path-resolution.test.ts src/config/resolve/anki-connect.test.ts src/config/resolve/subtitle-style.test.ts src/config/resolve/jellyfin.test.ts src/config/definitions/domain-registry.test.ts",
|
"format:src": "bash scripts/prettier-scope.sh --write",
|
||||||
"test:config:dist": "bun test dist/config/config.test.js dist/config/path-resolution.test.js dist/config/resolve/anki-connect.test.js dist/config/resolve/subtitle-style.test.js dist/config/resolve/jellyfin.test.js dist/config/definitions/domain-registry.test.js",
|
"format:check:src": "bash scripts/prettier-scope.sh --check",
|
||||||
|
"test:config:src": "bun test src/config/config.test.ts src/config/path-resolution.test.ts src/config/resolve/anki-connect.test.ts src/config/resolve/subtitle-style.test.ts src/config/resolve/jellyfin.test.ts src/config/definitions/domain-registry.test.ts src/generate-config-example.test.ts",
|
||||||
|
"test:config:dist": "bun test dist/config/config.test.js dist/config/path-resolution.test.js dist/config/resolve/anki-connect.test.js dist/config/resolve/subtitle-style.test.js dist/config/resolve/jellyfin.test.js dist/config/definitions/domain-registry.test.js dist/generate-config-example.test.js",
|
||||||
"test:config:smoke:dist": "bun test dist/config/path-resolution.test.js",
|
"test:config:smoke:dist": "bun test dist/config/path-resolution.test.js",
|
||||||
"test:plugin:src": "lua scripts/test-plugin-start-gate.lua",
|
"test:plugin:src": "lua scripts/test-plugin-start-gate.lua",
|
||||||
"test:launcher:smoke:src": "bun test launcher/smoke.e2e.test.ts",
|
"test:launcher:smoke:src": "bun test launcher/smoke.e2e.test.ts",
|
||||||
"test:launcher:src": "bun test launcher/config.test.ts launcher/config-domain-parsers.test.ts launcher/parse-args.test.ts launcher/main.test.ts launcher/commands/command-modules.test.ts launcher/smoke.e2e.test.ts && bun run test:plugin:src",
|
"test:launcher:src": "bun test launcher/config.test.ts launcher/config-domain-parsers.test.ts launcher/parse-args.test.ts launcher/main.test.ts launcher/commands/command-modules.test.ts launcher/smoke.e2e.test.ts && bun run test:plugin:src",
|
||||||
"test:core:src": "bun test src/cli/args.test.ts src/cli/help.test.ts src/core/services/cli-command.test.ts src/core/services/field-grouping-overlay.test.ts src/core/services/numeric-shortcut-session.test.ts src/core/services/secondary-subtitle.test.ts src/core/services/mpv-render-metrics.test.ts src/core/services/overlay-content-measurement.test.ts src/core/services/mpv-control.test.ts src/core/services/mpv.test.ts src/core/services/runtime-options-ipc.test.ts src/core/services/runtime-config.test.ts src/core/services/config-hot-reload.test.ts src/core/services/discord-presence.test.ts src/core/services/tokenizer.test.ts src/core/services/tokenizer/annotation-stage.test.ts src/core/services/tokenizer/parser-selection-stage.test.ts src/core/services/tokenizer/parser-enrichment-stage.test.ts src/core/services/subsync.test.ts src/core/services/overlay-bridge.test.ts src/core/services/overlay-shortcut-handler.test.ts src/core/services/mining.test.ts src/core/services/anki-jimaku.test.ts src/core/services/jimaku-download-path.test.ts src/core/services/jellyfin.test.ts src/core/services/jellyfin-remote.test.ts src/core/services/immersion-tracker-service.test.ts src/core/services/overlay-runtime-init.test.ts src/core/services/app-ready.test.ts src/core/services/startup-bootstrap.test.ts src/core/services/subtitle-processing-controller.test.ts src/core/services/anilist/anilist-update-queue.test.ts src/core/utils/shortcut-config.test.ts src/renderer/error-recovery.test.ts src/renderer/subtitle-render.test.ts src/renderer/handlers/mouse.test.ts src/renderer/handlers/keyboard.test.ts src/renderer/modals/jimaku.test.ts src/subsync/utils.test.ts src/main/anilist-url-guard.test.ts src/window-trackers/x11-tracker.test.ts launcher/config.test.ts launcher/config-domain-parsers.test.ts launcher/parse-args.test.ts launcher/main.test.ts launcher/commands/command-modules.test.ts",
|
"test:core:src": "bun test src/cli/args.test.ts src/cli/help.test.ts src/shared/setup-state.test.ts src/core/services/cli-command.test.ts src/core/services/field-grouping-overlay.test.ts src/core/services/numeric-shortcut-session.test.ts src/core/services/secondary-subtitle.test.ts src/core/services/mpv-render-metrics.test.ts src/core/services/overlay-content-measurement.test.ts src/core/services/mpv-control.test.ts src/core/services/mpv.test.ts src/core/services/runtime-options-ipc.test.ts src/core/services/runtime-config.test.ts src/core/services/yomitan-extension-paths.test.ts src/core/services/config-hot-reload.test.ts src/core/services/discord-presence.test.ts src/core/services/tokenizer.test.ts src/core/services/tokenizer/annotation-stage.test.ts src/core/services/tokenizer/parser-selection-stage.test.ts src/core/services/tokenizer/parser-enrichment-stage.test.ts src/core/services/subsync.test.ts src/core/services/overlay-bridge.test.ts src/core/services/overlay-shortcut-handler.test.ts src/core/services/mining.test.ts src/core/services/anki-jimaku.test.ts src/core/services/jimaku-download-path.test.ts src/core/services/jellyfin.test.ts src/core/services/jellyfin-remote.test.ts src/core/services/immersion-tracker-service.test.ts src/core/services/overlay-runtime-init.test.ts src/core/services/app-ready.test.ts src/core/services/startup-bootstrap.test.ts src/core/services/subtitle-processing-controller.test.ts src/core/services/anilist/anilist-update-queue.test.ts src/core/utils/shortcut-config.test.ts src/main/runtime/first-run-setup-plugin.test.ts src/main/runtime/first-run-setup-service.test.ts src/main/runtime/first-run-setup-window.test.ts src/main/runtime/tray-runtime.test.ts src/main/runtime/tray-main-actions.test.ts src/main/runtime/tray-main-deps.test.ts src/main/runtime/tray-runtime-handlers.test.ts src/main/runtime/cli-command-context-main-deps.test.ts src/main/runtime/app-ready-main-deps.test.ts src/renderer/error-recovery.test.ts 
src/renderer/subtitle-render.test.ts src/renderer/handlers/mouse.test.ts src/renderer/handlers/keyboard.test.ts src/renderer/modals/jimaku.test.ts src/subsync/utils.test.ts src/main/anilist-url-guard.test.ts src/window-trackers/x11-tracker.test.ts launcher/config.test.ts launcher/config-domain-parsers.test.ts launcher/parse-args.test.ts launcher/main.test.ts launcher/commands/command-modules.test.ts launcher/setup-gate.test.ts",
|
||||||
"test:core:dist": "bun test dist/cli/args.test.js dist/cli/help.test.js dist/core/services/cli-command.test.js dist/core/services/ipc.test.js dist/core/services/anki-jimaku-ipc.test.js dist/core/services/field-grouping-overlay.test.js dist/core/services/numeric-shortcut-session.test.js dist/core/services/secondary-subtitle.test.js dist/core/services/mpv-render-metrics.test.js dist/core/services/overlay-content-measurement.test.js dist/core/services/mpv-control.test.js dist/core/services/mpv.test.js dist/core/services/runtime-options-ipc.test.js dist/core/services/runtime-config.test.js dist/core/services/config-hot-reload.test.js dist/core/services/discord-presence.test.js dist/core/services/tokenizer.test.js dist/core/services/tokenizer/annotation-stage.test.js dist/core/services/tokenizer/parser-selection-stage.test.js dist/core/services/tokenizer/parser-enrichment-stage.test.js dist/core/services/subsync.test.js dist/core/services/overlay-bridge.test.js dist/core/services/overlay-manager.test.js dist/core/services/overlay-shortcut-handler.test.js dist/core/services/mining.test.js dist/core/services/anki-jimaku.test.js dist/core/services/jimaku-download-path.test.js dist/core/services/jellyfin.test.js dist/core/services/jellyfin-remote.test.js dist/core/services/immersion-tracker-service.test.js dist/core/services/overlay-runtime-init.test.js dist/core/services/app-ready.test.js dist/core/services/startup-bootstrap.test.js dist/core/services/subtitle-processing-controller.test.js dist/core/services/anilist/anilist-token-store.test.js dist/core/services/anilist/anilist-update-queue.test.js dist/renderer/error-recovery.test.js dist/renderer/subtitle-render.test.js dist/renderer/handlers/mouse.test.js dist/renderer/handlers/keyboard.test.js dist/renderer/modals/jimaku.test.js dist/subsync/utils.test.js dist/main/anilist-url-guard.test.js dist/window-trackers/x11-tracker.test.js",
|
"test:core:dist": "bun test dist/cli/args.test.js dist/cli/help.test.js dist/core/services/cli-command.test.js dist/core/services/ipc.test.js dist/core/services/anki-jimaku-ipc.test.js dist/core/services/field-grouping-overlay.test.js dist/core/services/numeric-shortcut-session.test.js dist/core/services/secondary-subtitle.test.js dist/core/services/mpv-render-metrics.test.js dist/core/services/overlay-content-measurement.test.js dist/core/services/mpv-control.test.js dist/core/services/mpv.test.js dist/core/services/runtime-options-ipc.test.js dist/core/services/runtime-config.test.js dist/core/services/yomitan-extension-paths.test.js dist/core/services/config-hot-reload.test.js dist/core/services/discord-presence.test.js dist/core/services/tokenizer.test.js dist/core/services/tokenizer/annotation-stage.test.js dist/core/services/tokenizer/parser-selection-stage.test.js dist/core/services/tokenizer/parser-enrichment-stage.test.js dist/core/services/subsync.test.js dist/core/services/overlay-bridge.test.js dist/core/services/overlay-manager.test.js dist/core/services/overlay-shortcut-handler.test.js dist/core/services/mining.test.js dist/core/services/anki-jimaku.test.js dist/core/services/jimaku-download-path.test.js dist/core/services/jellyfin.test.js dist/core/services/jellyfin-remote.test.js dist/core/services/immersion-tracker-service.test.js dist/core/services/overlay-runtime-init.test.js dist/core/services/app-ready.test.js dist/core/services/startup-bootstrap.test.js dist/core/services/subtitle-processing-controller.test.js dist/core/services/anilist/anilist-token-store.test.js dist/core/services/anilist/anilist-update-queue.test.js dist/renderer/error-recovery.test.js dist/renderer/subtitle-render.test.js dist/renderer/handlers/mouse.test.js dist/renderer/handlers/keyboard.test.js dist/renderer/modals/jimaku.test.js dist/subsync/utils.test.js dist/main/anilist-url-guard.test.js dist/window-trackers/x11-tracker.test.js",
|
||||||
"test:core:smoke:dist": "bun test dist/cli/help.test.js dist/core/services/runtime-config.test.js dist/core/services/ipc.test.js dist/core/services/overlay-manager.test.js dist/core/services/anilist/anilist-token-store.test.js dist/core/services/startup-bootstrap.test.js dist/renderer/error-recovery.test.js dist/main/anilist-url-guard.test.js dist/window-trackers/x11-tracker.test.js",
|
"test:core:smoke:dist": "bun test dist/cli/help.test.js dist/core/services/runtime-config.test.js dist/core/services/ipc.test.js dist/core/services/overlay-manager.test.js dist/core/services/anilist/anilist-token-store.test.js dist/core/services/startup-bootstrap.test.js dist/renderer/error-recovery.test.js dist/main/anilist-url-guard.test.js dist/window-trackers/x11-tracker.test.js",
|
||||||
"test:smoke:dist": "bun run test:config:smoke:dist && bun run test:core:smoke:dist",
|
"test:smoke:dist": "bun run test:config:smoke:dist && bun run test:core:smoke:dist",
|
||||||
"test:subtitle:src": "bun test src/core/services/subsync.test.ts src/subsync/utils.test.ts",
|
"test:subtitle:src": "bun test src/core/services/subsync.test.ts src/subsync/utils.test.ts",
|
||||||
@@ -117,7 +120,7 @@
|
|||||||
],
|
],
|
||||||
"extraResources": [
|
"extraResources": [
|
||||||
{
|
{
|
||||||
"from": "vendor/yomitan",
|
"from": "build/yomitan",
|
||||||
"to": "yomitan"
|
"to": "yomitan"
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
@@ -128,6 +131,14 @@
|
|||||||
"from": "assets",
|
"from": "assets",
|
||||||
"to": "assets"
|
"to": "assets"
|
||||||
},
|
},
|
||||||
|
{
|
||||||
|
"from": "plugin/subminer",
|
||||||
|
"to": "plugin/subminer"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"from": "plugin/subminer.conf",
|
||||||
|
"to": "plugin/subminer.conf"
|
||||||
|
},
|
||||||
{
|
{
|
||||||
"from": "dist/scripts/get-mpv-window-macos",
|
"from": "dist/scripts/get-mpv-window-macos",
|
||||||
"to": "scripts/get-mpv-window-macos"
|
"to": "scripts/get-mpv-window-macos"
|
||||||
|
|||||||
144
scripts/build-yomitan.mjs
Normal file
144
scripts/build-yomitan.mjs
Normal file
@@ -0,0 +1,144 @@
|
|||||||
|
import fs from 'node:fs';
|
||||||
|
import os from 'node:os';
|
||||||
|
import path from 'node:path';
|
||||||
|
import { createHash } from 'node:crypto';
|
||||||
|
import { execFileSync } from 'node:child_process';
|
||||||
|
import { fileURLToPath } from 'node:url';
|
||||||
|
|
||||||
|
const dirname = path.dirname(fileURLToPath(import.meta.url));
|
||||||
|
const repoRoot = path.resolve(dirname, '..');
|
||||||
|
const submoduleDir = path.join(repoRoot, 'vendor', 'subminer-yomitan');
|
||||||
|
const submodulePackagePath = path.join(submoduleDir, 'package.json');
|
||||||
|
const submodulePackageLockPath = path.join(submoduleDir, 'package-lock.json');
|
||||||
|
const buildOutputDir = path.join(repoRoot, 'build', 'yomitan');
|
||||||
|
const stampPath = path.join(buildOutputDir, '.subminer-build.json');
|
||||||
|
const zipPath = path.join(submoduleDir, 'builds', 'yomitan-chrome.zip');
|
||||||
|
const npmCommand = process.platform === 'win32' ? 'npm.cmd' : 'npm';
|
||||||
|
const dependencyStampPath = path.join(submoduleDir, 'node_modules', '.subminer-package-lock-hash');
|
||||||
|
|
||||||
|
function run(command, args, cwd) {
|
||||||
|
execFileSync(command, args, { cwd, stdio: 'inherit' });
|
||||||
|
}
|
||||||
|
|
||||||
|
function readCommand(command, args, cwd) {
|
||||||
|
return execFileSync(command, args, { cwd, encoding: 'utf8' }).trim();
|
||||||
|
}
|
||||||
|
|
||||||
|
function readStamp() {
|
||||||
|
try {
|
||||||
|
return JSON.parse(fs.readFileSync(stampPath, 'utf8'));
|
||||||
|
} catch {
|
||||||
|
return null;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
function hashFile(filePath) {
|
||||||
|
const hash = createHash('sha256');
|
||||||
|
hash.update(fs.readFileSync(filePath));
|
||||||
|
return hash.digest('hex');
|
||||||
|
}
|
||||||
|
|
||||||
|
function ensureSubmodulePresent() {
|
||||||
|
if (!fs.existsSync(submodulePackagePath)) {
|
||||||
|
throw new Error(
|
||||||
|
'Missing vendor/subminer-yomitan submodule. Run `git submodule update --init --recursive`.',
|
||||||
|
);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
function getSourceState() {
|
||||||
|
const revision = readCommand('git', ['rev-parse', 'HEAD'], submoduleDir);
|
||||||
|
const dirty = readCommand('git', ['status', '--short', '--untracked-files=no'], submoduleDir);
|
||||||
|
return { revision, dirty };
|
||||||
|
}
|
||||||
|
|
||||||
|
function isBuildCurrent(force) {
|
||||||
|
if (force) {
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
if (!fs.existsSync(path.join(buildOutputDir, 'manifest.json'))) {
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
|
||||||
|
const stamp = readStamp();
|
||||||
|
if (!stamp) {
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
|
||||||
|
const currentState = getSourceState();
|
||||||
|
return stamp.revision === currentState.revision && stamp.dirty === currentState.dirty;
|
||||||
|
}
|
||||||
|
|
||||||
|
function ensureDependenciesInstalled() {
|
||||||
|
const nodeModulesDir = path.join(submoduleDir, 'node_modules');
|
||||||
|
const currentLockHash = hashFile(submodulePackageLockPath);
|
||||||
|
let installedLockHash = '';
|
||||||
|
try {
|
||||||
|
installedLockHash = fs.readFileSync(dependencyStampPath, 'utf8').trim();
|
||||||
|
} catch {}
|
||||||
|
|
||||||
|
if (!fs.existsSync(nodeModulesDir) || installedLockHash !== currentLockHash) {
|
||||||
|
run(npmCommand, ['ci'], submoduleDir);
|
||||||
|
fs.mkdirSync(nodeModulesDir, { recursive: true });
|
||||||
|
fs.writeFileSync(dependencyStampPath, `${currentLockHash}\n`, 'utf8');
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
function installAndBuild() {
|
||||||
|
ensureDependenciesInstalled();
|
||||||
|
run(npmCommand, ['run', 'build', '--', '--target', 'chrome'], submoduleDir);
|
||||||
|
}
|
||||||
|
|
||||||
|
function extractBuild() {
|
||||||
|
if (!fs.existsSync(zipPath)) {
|
||||||
|
throw new Error(`Expected Yomitan build artifact at ${zipPath}`);
|
||||||
|
}
|
||||||
|
|
||||||
|
const tempDir = fs.mkdtempSync(path.join(os.tmpdir(), 'subminer-yomitan-'));
|
||||||
|
try {
|
||||||
|
run('unzip', ['-qo', zipPath, '-d', tempDir], repoRoot);
|
||||||
|
fs.rmSync(buildOutputDir, { recursive: true, force: true });
|
||||||
|
fs.mkdirSync(path.dirname(buildOutputDir), { recursive: true });
|
||||||
|
fs.cpSync(tempDir, buildOutputDir, { recursive: true });
|
||||||
|
if (!fs.existsSync(path.join(buildOutputDir, 'manifest.json'))) {
|
||||||
|
throw new Error(`Extracted Yomitan build missing manifest.json in ${buildOutputDir}`);
|
||||||
|
}
|
||||||
|
} finally {
|
||||||
|
fs.rmSync(tempDir, { recursive: true, force: true });
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
function writeStamp() {
|
||||||
|
const state = getSourceState();
|
||||||
|
fs.writeFileSync(
|
||||||
|
stampPath,
|
||||||
|
`${JSON.stringify(
|
||||||
|
{
|
||||||
|
revision: state.revision,
|
||||||
|
dirty: state.dirty,
|
||||||
|
builtAt: new Date().toISOString(),
|
||||||
|
},
|
||||||
|
null,
|
||||||
|
2,
|
||||||
|
)}\n`,
|
||||||
|
'utf8',
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
function main() {
|
||||||
|
const force = process.argv.includes('--force');
|
||||||
|
ensureSubmodulePresent();
|
||||||
|
|
||||||
|
if (isBuildCurrent(force)) {
|
||||||
|
process.stdout.write(`Yomitan build current: ${buildOutputDir}\n`);
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
process.stdout.write('Building Yomitan Chrome artifact...\n');
|
||||||
|
installAndBuild();
|
||||||
|
extractBuild();
|
||||||
|
writeStamp();
|
||||||
|
process.stdout.write(`Yomitan extracted to ${buildOutputDir}\n`);
|
||||||
|
}
|
||||||
|
|
||||||
|
main();
|
||||||
@@ -4,6 +4,7 @@ import process from 'node:process';
|
|||||||
|
|
||||||
import { createTokenizerDepsRuntime, tokenizeSubtitle } from '../src/core/services/tokenizer.js';
|
import { createTokenizerDepsRuntime, tokenizeSubtitle } from '../src/core/services/tokenizer.js';
|
||||||
import { createFrequencyDictionaryLookup } from '../src/core/services/frequency-dictionary.js';
|
import { createFrequencyDictionaryLookup } from '../src/core/services/frequency-dictionary.js';
|
||||||
|
import { resolveYomitanExtensionPath as resolveBuiltYomitanExtensionPath } from '../src/core/services/yomitan-extension-paths.js';
|
||||||
import { MecabTokenizer } from '../src/mecab-tokenizer.js';
|
import { MecabTokenizer } from '../src/mecab-tokenizer.js';
|
||||||
import type { MergedToken, FrequencyDictionaryLookup } from '../src/types.js';
|
import type { MergedToken, FrequencyDictionaryLookup } from '../src/types.js';
|
||||||
|
|
||||||
@@ -94,7 +95,7 @@ function parseCliArgs(argv: string[]): CliOptions {
|
|||||||
if (!next) {
|
if (!next) {
|
||||||
throw new Error('Missing value for --yomitan-extension');
|
throw new Error('Missing value for --yomitan-extension');
|
||||||
}
|
}
|
||||||
yomitanExtensionPath = path.resolve(next);
|
yomitanExtensionPath = next;
|
||||||
continue;
|
continue;
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -103,7 +104,7 @@ function parseCliArgs(argv: string[]): CliOptions {
|
|||||||
if (!next) {
|
if (!next) {
|
||||||
throw new Error('Missing value for --yomitan-user-data');
|
throw new Error('Missing value for --yomitan-user-data');
|
||||||
}
|
}
|
||||||
yomitanUserDataPath = path.resolve(next);
|
yomitanUserDataPath = next;
|
||||||
continue;
|
continue;
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -225,12 +226,12 @@ function parseCliArgs(argv: string[]): CliOptions {
|
|||||||
}
|
}
|
||||||
|
|
||||||
if (arg.startsWith('--yomitan-extension=')) {
|
if (arg.startsWith('--yomitan-extension=')) {
|
||||||
yomitanExtensionPath = path.resolve(arg.slice('--yomitan-extension='.length));
|
yomitanExtensionPath = arg.slice('--yomitan-extension='.length);
|
||||||
continue;
|
continue;
|
||||||
}
|
}
|
||||||
|
|
||||||
if (arg.startsWith('--yomitan-user-data=')) {
|
if (arg.startsWith('--yomitan-user-data=')) {
|
||||||
yomitanUserDataPath = path.resolve(arg.slice('--yomitan-user-data='.length));
|
yomitanUserDataPath = arg.slice('--yomitan-user-data='.length);
|
||||||
continue;
|
continue;
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -524,7 +525,10 @@ function destroyUnknownParserWindow(window: unknown): void {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
async function createYomitanRuntimeState(userDataPath: string): Promise<YomitanRuntimeState> {
|
async function createYomitanRuntimeState(
|
||||||
|
userDataPath: string,
|
||||||
|
extensionPath?: string,
|
||||||
|
): Promise<YomitanRuntimeState> {
|
||||||
const state: YomitanRuntimeState = {
|
const state: YomitanRuntimeState = {
|
||||||
yomitanExt: null,
|
yomitanExt: null,
|
||||||
parserWindow: null,
|
parserWindow: null,
|
||||||
@@ -547,6 +551,7 @@ async function createYomitanRuntimeState(userDataPath: string): Promise<YomitanR
|
|||||||
const loadYomitanExtension = (await import('../src/core/services/yomitan-extension-loader.js'))
|
const loadYomitanExtension = (await import('../src/core/services/yomitan-extension-loader.js'))
|
||||||
.loadYomitanExtension as (options: {
|
.loadYomitanExtension as (options: {
|
||||||
userDataPath: string;
|
userDataPath: string;
|
||||||
|
extensionPath?: string;
|
||||||
getYomitanParserWindow: () => unknown;
|
getYomitanParserWindow: () => unknown;
|
||||||
setYomitanParserWindow: (window: unknown) => void;
|
setYomitanParserWindow: (window: unknown) => void;
|
||||||
setYomitanParserReadyPromise: (promise: Promise<void> | null) => void;
|
setYomitanParserReadyPromise: (promise: Promise<void> | null) => void;
|
||||||
@@ -556,6 +561,7 @@ async function createYomitanRuntimeState(userDataPath: string): Promise<YomitanR
|
|||||||
|
|
||||||
const extension = await loadYomitanExtension({
|
const extension = await loadYomitanExtension({
|
||||||
userDataPath,
|
userDataPath,
|
||||||
|
extensionPath,
|
||||||
getYomitanParserWindow: () => state.parserWindow,
|
getYomitanParserWindow: () => state.parserWindow,
|
||||||
setYomitanParserWindow: (window) => {
|
setYomitanParserWindow: (window) => {
|
||||||
state.parserWindow = window;
|
state.parserWindow = window;
|
||||||
@@ -589,17 +595,16 @@ async function createYomitanRuntimeStateWithSearch(
|
|||||||
userDataPath: string,
|
userDataPath: string,
|
||||||
extensionPath?: string,
|
extensionPath?: string,
|
||||||
): Promise<YomitanRuntimeState> {
|
): Promise<YomitanRuntimeState> {
|
||||||
const preferredPath = extensionPath ? path.resolve(extensionPath) : undefined;
|
const resolvedExtensionPath = resolveBuiltYomitanExtensionPath({
|
||||||
const defaultVendorPath = path.resolve(process.cwd(), 'vendor', 'yomitan');
|
explicitPath: extensionPath,
|
||||||
const candidates = [...(preferredPath ? [preferredPath] : []), defaultVendorPath];
|
cwd: process.cwd(),
|
||||||
|
});
|
||||||
|
const candidates = resolvedExtensionPath ? [resolvedExtensionPath] : [];
|
||||||
|
|
||||||
for (const candidate of candidates) {
|
for (const candidate of candidates) {
|
||||||
if (!candidate) {
|
|
||||||
continue;
|
|
||||||
}
|
|
||||||
try {
|
try {
|
||||||
if (fs.existsSync(path.join(candidate, 'manifest.json'))) {
|
if (fs.existsSync(path.join(candidate, 'manifest.json'))) {
|
||||||
const state = await createYomitanRuntimeState(userDataPath);
|
const state = await createYomitanRuntimeState(userDataPath, candidate);
|
||||||
if (state.available) {
|
if (state.available) {
|
||||||
return state;
|
return state;
|
||||||
}
|
}
|
||||||
@@ -613,7 +618,7 @@ async function createYomitanRuntimeStateWithSearch(
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
return createYomitanRuntimeState(userDataPath);
|
return createYomitanRuntimeState(userDataPath, resolvedExtensionPath ?? undefined);
|
||||||
}
|
}
|
||||||
|
|
||||||
async function getFrequencyLookup(dictionaryPath: string): Promise<FrequencyDictionaryLookup> {
|
async function getFrequencyLookup(dictionaryPath: string): Promise<FrequencyDictionaryLookup> {
|
||||||
|
|||||||
@@ -1,261 +1,16 @@
|
|||||||
#!/bin/bash
|
#!/bin/bash
|
||||||
#
|
|
||||||
# SubMiner - All-in-one sentence mining overlay
|
|
||||||
# Copyright (C) 2024 sudacode
|
|
||||||
#
|
|
||||||
# This program is free software: you can redistribute it and/or modify
|
|
||||||
# it under the terms of the GNU General Public License as published by
|
|
||||||
# the Free Software Foundation, either version 3 of the License, or
|
|
||||||
# (at your option) any later version.
|
|
||||||
#
|
|
||||||
# This program is distributed in the hope that it will be useful,
|
|
||||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
|
||||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
|
||||||
# GNU General Public License for more details.
|
|
||||||
#
|
|
||||||
# You should have received a copy of the GNU General Public License
|
|
||||||
# along with this program. If not, see <https://www.gnu.org/licenses/>.
|
|
||||||
#
|
|
||||||
# patch-yomitan.sh - Apply Electron compatibility patches to Yomitan
|
|
||||||
#
|
|
||||||
# This script applies the necessary patches to make Yomitan work in Electron
|
|
||||||
# after upgrading to a new version. Run this after extracting a fresh Yomitan release.
|
|
||||||
#
|
|
||||||
# Usage: ./patch-yomitan.sh [yomitan_dir]
|
|
||||||
# yomitan_dir: Path to the Yomitan directory (default: vendor/yomitan)
|
|
||||||
#
|
|
||||||
|
|
||||||
set -e
|
set -euo pipefail
|
||||||
|
|
||||||
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
|
cat <<'EOF'
|
||||||
YOMITAN_DIR="${1:-$SCRIPT_DIR/../vendor/yomitan}"
|
patch-yomitan.sh is retired.
|
||||||
|
|
||||||
if [ ! -d "$YOMITAN_DIR" ]; then
|
SubMiner now uses the forked source submodule at vendor/subminer-yomitan and builds the
|
||||||
echo "Error: Yomitan directory not found: $YOMITAN_DIR"
|
Chromium extension artifact into build/yomitan.
|
||||||
exit 1
|
|
||||||
fi
|
|
||||||
|
|
||||||
echo "Patching Yomitan in: $YOMITAN_DIR"
|
Use:
|
||||||
|
git submodule update --init --recursive
|
||||||
|
bun run build:yomitan
|
||||||
|
|
||||||
PERMISSIONS_UTIL="$YOMITAN_DIR/js/data/permissions-util.js"
|
If you need to change Electron compatibility behavior, patch the forked source repo and rebuild.
|
||||||
|
EOF
|
||||||
if [ ! -f "$PERMISSIONS_UTIL" ]; then
|
|
||||||
echo "Error: permissions-util.js not found at $PERMISSIONS_UTIL"
|
|
||||||
exit 1
|
|
||||||
fi
|
|
||||||
|
|
||||||
echo "Patching permissions-util.js..."
|
|
||||||
|
|
||||||
if grep -q "Electron workaround" "$PERMISSIONS_UTIL"; then
|
|
||||||
echo " - Already patched, skipping"
|
|
||||||
else
|
|
||||||
cat > "$PERMISSIONS_UTIL.tmp" << 'PATCH_EOF'
|
|
||||||
/*
|
|
||||||
* Copyright (C) 2023-2025 Yomitan Authors
|
|
||||||
* Copyright (C) 2021-2022 Yomichan Authors
|
|
||||||
*
|
|
||||||
* This program is free software: you can redistribute it and/or modify
|
|
||||||
* it under the terms of the GNU General Public License as published by
|
|
||||||
* the Free Software Foundation, either version 3 of the License, or
|
|
||||||
* (at your option) any later version.
|
|
||||||
*
|
|
||||||
* This program is distributed in the hope that it will be useful,
|
|
||||||
* but WITHOUT ANY WARRANTY; without even the implied warranty of
|
|
||||||
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
|
||||||
* GNU General Public License for more details.
|
|
||||||
*
|
|
||||||
* You should have received a copy of the GNU General Public License
|
|
||||||
* along with this program. If not, see <https://www.gnu.org/licenses/>.
|
|
||||||
*/
|
|
||||||
|
|
||||||
import {getFieldMarkers} from './anki-util.js';
|
|
||||||
|
|
||||||
/**
|
|
||||||
* This function returns whether an Anki field marker might require clipboard permissions.
|
|
||||||
* This is speculative and may not guarantee that the field marker actually does require the permission,
|
|
||||||
* as the custom handlebars template is not deeply inspected.
|
|
||||||
* @param {string} marker
|
|
||||||
* @returns {boolean}
|
|
||||||
*/
|
|
||||||
function ankiFieldMarkerMayUseClipboard(marker) {
|
|
||||||
switch (marker) {
|
|
||||||
case 'clipboard-image':
|
|
||||||
case 'clipboard-text':
|
|
||||||
return true;
|
|
||||||
default:
|
|
||||||
return false;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* @param {chrome.permissions.Permissions} permissions
|
|
||||||
* @returns {Promise<boolean>}
|
|
||||||
*/
|
|
||||||
export function hasPermissions(permissions) {
|
|
||||||
return new Promise((resolve, reject) => {
|
|
||||||
chrome.permissions.contains(permissions, (result) => {
|
|
||||||
const e = chrome.runtime.lastError;
|
|
||||||
if (e) {
|
|
||||||
reject(new Error(e.message));
|
|
||||||
} else {
|
|
||||||
resolve(result);
|
|
||||||
}
|
|
||||||
});
|
|
||||||
});
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* @param {chrome.permissions.Permissions} permissions
|
|
||||||
* @param {boolean} shouldHave
|
|
||||||
* @returns {Promise<boolean>}
|
|
||||||
*/
|
|
||||||
export function setPermissionsGranted(permissions, shouldHave) {
|
|
||||||
return (
|
|
||||||
shouldHave ?
|
|
||||||
new Promise((resolve, reject) => {
|
|
||||||
chrome.permissions.request(permissions, (result) => {
|
|
||||||
const e = chrome.runtime.lastError;
|
|
||||||
if (e) {
|
|
||||||
reject(new Error(e.message));
|
|
||||||
} else {
|
|
||||||
resolve(result);
|
|
||||||
}
|
|
||||||
});
|
|
||||||
}) :
|
|
||||||
new Promise((resolve, reject) => {
|
|
||||||
chrome.permissions.remove(permissions, (result) => {
|
|
||||||
const e = chrome.runtime.lastError;
|
|
||||||
if (e) {
|
|
||||||
reject(new Error(e.message));
|
|
||||||
} else {
|
|
||||||
resolve(!result);
|
|
||||||
}
|
|
||||||
});
|
|
||||||
})
|
|
||||||
);
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* @returns {Promise<chrome.permissions.Permissions>}
|
|
||||||
*/
|
|
||||||
export function getAllPermissions() {
|
|
||||||
// Electron workaround - chrome.permissions.getAll() not available
|
|
||||||
return Promise.resolve({
|
|
||||||
origins: ["<all_urls>"],
|
|
||||||
permissions: ["clipboardWrite", "storage", "unlimitedStorage", "scripting", "contextMenus"]
|
|
||||||
});
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* @param {string} fieldValue
|
|
||||||
* @returns {string[]}
|
|
||||||
*/
|
|
||||||
export function getRequiredPermissionsForAnkiFieldValue(fieldValue) {
|
|
||||||
const markers = getFieldMarkers(fieldValue);
|
|
||||||
for (const marker of markers) {
|
|
||||||
if (ankiFieldMarkerMayUseClipboard(marker)) {
|
|
||||||
return ['clipboardRead'];
|
|
||||||
}
|
|
||||||
}
|
|
||||||
return [];
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* @param {chrome.permissions.Permissions} permissions
|
|
||||||
* @param {import('settings').ProfileOptions} options
|
|
||||||
* @returns {boolean}
|
|
||||||
*/
|
|
||||||
export function hasRequiredPermissionsForOptions(permissions, options) {
|
|
||||||
const permissionsSet = new Set(permissions.permissions);
|
|
||||||
|
|
||||||
if (!permissionsSet.has('nativeMessaging') && (options.parsing.enableMecabParser || options.general.enableYomitanApi)) {
|
|
||||||
return false;
|
|
||||||
}
|
|
||||||
|
|
||||||
if (!permissionsSet.has('clipboardRead')) {
|
|
||||||
if (options.clipboard.enableBackgroundMonitor || options.clipboard.enableSearchPageMonitor) {
|
|
||||||
return false;
|
|
||||||
}
|
|
||||||
const fieldsList = options.anki.cardFormats.map((cardFormat) => cardFormat.fields);
|
|
||||||
|
|
||||||
for (const fields of fieldsList) {
|
|
||||||
for (const {value: fieldValue} of Object.values(fields)) {
|
|
||||||
const markers = getFieldMarkers(fieldValue);
|
|
||||||
for (const marker of markers) {
|
|
||||||
if (ankiFieldMarkerMayUseClipboard(marker)) {
|
|
||||||
return false;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
return true;
|
|
||||||
}
|
|
||||||
PATCH_EOF
|
|
||||||
|
|
||||||
mv "$PERMISSIONS_UTIL.tmp" "$PERMISSIONS_UTIL"
|
|
||||||
echo " - Patched successfully"
|
|
||||||
fi
|
|
||||||
|
|
||||||
OPTIONS_SCHEMA="$YOMITAN_DIR/data/schemas/options-schema.json"
|
|
||||||
|
|
||||||
if [ ! -f "$OPTIONS_SCHEMA" ]; then
|
|
||||||
echo "Error: options-schema.json not found at $OPTIONS_SCHEMA"
|
|
||||||
exit 1
|
|
||||||
fi
|
|
||||||
|
|
||||||
echo "Patching options-schema.json..."
|
|
||||||
|
|
||||||
if grep -q '"selectText".*"default": true' "$OPTIONS_SCHEMA"; then
|
|
||||||
sed -i '/"selectText": {/,/"default":/{s/"default": true/"default": false/}' "$OPTIONS_SCHEMA"
|
|
||||||
echo " - Changed selectText default to false"
|
|
||||||
elif grep -q '"selectText".*"default": false' "$OPTIONS_SCHEMA"; then
|
|
||||||
echo " - selectText already set to false, skipping"
|
|
||||||
else
|
|
||||||
echo " - Warning: Could not find selectText setting"
|
|
||||||
fi
|
|
||||||
|
|
||||||
if grep -q '"layoutAwareScan".*"default": true' "$OPTIONS_SCHEMA"; then
|
|
||||||
sed -i '/"layoutAwareScan": {/,/"default":/{s/"default": true/"default": false/}' "$OPTIONS_SCHEMA"
|
|
||||||
echo " - Changed layoutAwareScan default to false"
|
|
||||||
elif grep -q '"layoutAwareScan".*"default": false' "$OPTIONS_SCHEMA"; then
|
|
||||||
echo " - layoutAwareScan already set to false, skipping"
|
|
||||||
else
|
|
||||||
echo " - Warning: Could not find layoutAwareScan setting"
|
|
||||||
fi
|
|
||||||
|
|
||||||
POPUP_JS="$YOMITAN_DIR/js/app/popup.js"
|
|
||||||
|
|
||||||
if [ ! -f "$POPUP_JS" ]; then
|
|
||||||
echo "Error: popup.js not found at $POPUP_JS"
|
|
||||||
exit 1
|
|
||||||
fi
|
|
||||||
|
|
||||||
echo "Patching popup.js..."
|
|
||||||
|
|
||||||
if grep -q "yomitan-popup-shown" "$POPUP_JS"; then
|
|
||||||
echo " - Already patched, skipping"
|
|
||||||
else
|
|
||||||
# Add the visibility event dispatch after the existing _onVisibleChange code
|
|
||||||
# We need to add it after: void this._invokeSafe('displayVisibilityChanged', {value});
|
|
||||||
sed -i "/void this._invokeSafe('displayVisibilityChanged', {value});/a\\
|
|
||||||
\\
|
|
||||||
// Dispatch custom events for popup visibility (Electron integration)\\
|
|
||||||
if (value) {\\
|
|
||||||
window.dispatchEvent(new CustomEvent('yomitan-popup-shown'));\\
|
|
||||||
} else {\\
|
|
||||||
window.dispatchEvent(new CustomEvent('yomitan-popup-hidden'));\\
|
|
||||||
}" "$POPUP_JS"
|
|
||||||
echo " - Added visibility events"
|
|
||||||
fi
|
|
||||||
|
|
||||||
echo ""
|
|
||||||
echo "Yomitan patching complete!"
|
|
||||||
echo ""
|
|
||||||
echo "Changes applied:"
|
|
||||||
echo " 1. permissions-util.js: Hardcoded permissions (Electron workaround)"
|
|
||||||
echo " 2. options-schema.json: selectText=false, layoutAwareScan=false"
|
|
||||||
echo " 3. popup.js: Added yomitan-popup-shown/hidden events"
|
|
||||||
echo ""
|
|
||||||
echo "To verify: Run 'bun run dev' and check for 'Yomitan extension loaded successfully'"
|
|
||||||
|
|||||||
20
scripts/prettier-scope.sh
Normal file
20
scripts/prettier-scope.sh
Normal file
@@ -0,0 +1,20 @@
|
|||||||
|
#!/usr/bin/env bash
|
||||||
|
set -euo pipefail
|
||||||
|
|
||||||
|
ROOT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")/.." && pwd)"
|
||||||
|
cd "$ROOT_DIR"
|
||||||
|
|
||||||
|
paths=(
|
||||||
|
"package.json"
|
||||||
|
"tsconfig.json"
|
||||||
|
"tsconfig.renderer.json"
|
||||||
|
"tsconfig.typecheck.json"
|
||||||
|
".prettierrc.json"
|
||||||
|
".github"
|
||||||
|
"build"
|
||||||
|
"launcher"
|
||||||
|
"scripts"
|
||||||
|
"src"
|
||||||
|
)
|
||||||
|
|
||||||
|
exec bunx prettier "$@" "${paths[@]}"
|
||||||
@@ -4,6 +4,7 @@ import path from 'node:path';
|
|||||||
import process from 'node:process';
|
import process from 'node:process';
|
||||||
|
|
||||||
import { createTokenizerDepsRuntime, tokenizeSubtitle } from '../src/core/services/tokenizer.js';
|
import { createTokenizerDepsRuntime, tokenizeSubtitle } from '../src/core/services/tokenizer.js';
|
||||||
|
import { resolveYomitanExtensionPath as resolveBuiltYomitanExtensionPath } from '../src/core/services/yomitan-extension-paths.js';
|
||||||
import { MecabTokenizer } from '../src/mecab-tokenizer.js';
|
import { MecabTokenizer } from '../src/mecab-tokenizer.js';
|
||||||
import type { MergedToken } from '../src/types.js';
|
import type { MergedToken } from '../src/types.js';
|
||||||
|
|
||||||
@@ -112,12 +113,12 @@ function parseCliArgs(argv: string[]): CliOptions {
|
|||||||
if (!next) {
|
if (!next) {
|
||||||
throw new Error('Missing value for --yomitan-extension');
|
throw new Error('Missing value for --yomitan-extension');
|
||||||
}
|
}
|
||||||
yomitanExtensionPath = path.resolve(next);
|
yomitanExtensionPath = next;
|
||||||
continue;
|
continue;
|
||||||
}
|
}
|
||||||
|
|
||||||
if (arg.startsWith('--yomitan-extension=')) {
|
if (arg.startsWith('--yomitan-extension=')) {
|
||||||
yomitanExtensionPath = path.resolve(arg.slice('--yomitan-extension='.length));
|
yomitanExtensionPath = arg.slice('--yomitan-extension='.length);
|
||||||
continue;
|
continue;
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -126,12 +127,12 @@ function parseCliArgs(argv: string[]): CliOptions {
|
|||||||
if (!next) {
|
if (!next) {
|
||||||
throw new Error('Missing value for --yomitan-user-data');
|
throw new Error('Missing value for --yomitan-user-data');
|
||||||
}
|
}
|
||||||
yomitanUserDataPath = path.resolve(next);
|
yomitanUserDataPath = next;
|
||||||
continue;
|
continue;
|
||||||
}
|
}
|
||||||
|
|
||||||
if (arg.startsWith('--yomitan-user-data=')) {
|
if (arg.startsWith('--yomitan-user-data=')) {
|
||||||
yomitanUserDataPath = path.resolve(arg.slice('--yomitan-user-data='.length));
|
yomitanUserDataPath = arg.slice('--yomitan-user-data='.length);
|
||||||
continue;
|
continue;
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -372,21 +373,10 @@ function findSelectedCandidateIndexes(
|
|||||||
}
|
}
|
||||||
|
|
||||||
function resolveYomitanExtensionPath(explicitPath?: string): string | null {
|
function resolveYomitanExtensionPath(explicitPath?: string): string | null {
|
||||||
const candidates = [
|
return resolveBuiltYomitanExtensionPath({
|
||||||
explicitPath ? path.resolve(explicitPath) : null,
|
explicitPath,
|
||||||
path.resolve(process.cwd(), 'vendor', 'yomitan'),
|
cwd: process.cwd(),
|
||||||
];
|
});
|
||||||
|
|
||||||
for (const candidate of candidates) {
|
|
||||||
if (!candidate) {
|
|
||||||
continue;
|
|
||||||
}
|
|
||||||
if (fs.existsSync(path.join(candidate, 'manifest.json'))) {
|
|
||||||
return candidate;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
return null;
|
|
||||||
}
|
}
|
||||||
|
|
||||||
async function setupYomitanRuntime(options: CliOptions): Promise<YomitanRuntimeState> {
|
async function setupYomitanRuntime(options: CliOptions): Promise<YomitanRuntimeState> {
|
||||||
@@ -420,7 +410,7 @@ async function setupYomitanRuntime(options: CliOptions): Promise<YomitanRuntimeS
|
|||||||
|
|
||||||
const extensionPath = resolveYomitanExtensionPath(options.yomitanExtensionPath);
|
const extensionPath = resolveYomitanExtensionPath(options.yomitanExtensionPath);
|
||||||
if (!extensionPath) {
|
if (!extensionPath) {
|
||||||
state.note = 'no Yomitan extension directory found';
|
state.note = 'no built Yomitan extension directory found; run `bun run build:yomitan`';
|
||||||
return state;
|
return state;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|||||||
@@ -55,7 +55,10 @@ test('AnkiIntegrationRuntime normalizes url and proxy defaults', () => {
|
|||||||
assert.equal(normalized.proxy?.host, '0.0.0.0');
|
assert.equal(normalized.proxy?.host, '0.0.0.0');
|
||||||
assert.equal(normalized.proxy?.port, 7001);
|
assert.equal(normalized.proxy?.port, 7001);
|
||||||
assert.equal(normalized.proxy?.upstreamUrl, 'http://anki.local:8765');
|
assert.equal(normalized.proxy?.upstreamUrl, 'http://anki.local:8765');
|
||||||
assert.equal(normalized.media?.fallbackDuration, DEFAULT_ANKI_CONNECT_CONFIG.media.fallbackDuration);
|
assert.equal(
|
||||||
|
normalized.media?.fallbackDuration,
|
||||||
|
DEFAULT_ANKI_CONNECT_CONFIG.media.fallbackDuration,
|
||||||
|
);
|
||||||
});
|
});
|
||||||
|
|
||||||
test('AnkiIntegrationRuntime starts proxy transport when proxy mode is enabled', () => {
|
test('AnkiIntegrationRuntime starts proxy transport when proxy mode is enabled', () => {
|
||||||
@@ -70,10 +73,7 @@ test('AnkiIntegrationRuntime starts proxy transport when proxy mode is enabled',
|
|||||||
|
|
||||||
runtime.start();
|
runtime.start();
|
||||||
|
|
||||||
assert.deepEqual(calls, [
|
assert.deepEqual(calls, ['known:start', 'proxy:start:127.0.0.1:9999:http://upstream:8765']);
|
||||||
'known:start',
|
|
||||||
'proxy:start:127.0.0.1:9999:http://upstream:8765',
|
|
||||||
]);
|
|
||||||
});
|
});
|
||||||
|
|
||||||
test('AnkiIntegrationRuntime switches transports and clears known words when runtime patch disables highlighting', () => {
|
test('AnkiIntegrationRuntime switches transports and clears known words when runtime patch disables highlighting', () => {
|
||||||
|
|||||||
@@ -31,8 +31,7 @@ function trimToNonEmptyString(value: unknown): string | null {
|
|||||||
}
|
}
|
||||||
|
|
||||||
export function normalizeAnkiIntegrationConfig(config: AnkiConnectConfig): AnkiConnectConfig {
|
export function normalizeAnkiIntegrationConfig(config: AnkiConnectConfig): AnkiConnectConfig {
|
||||||
const resolvedUrl =
|
const resolvedUrl = trimToNonEmptyString(config.url) ?? DEFAULT_ANKI_CONNECT_CONFIG.url;
|
||||||
trimToNonEmptyString(config.url) ?? DEFAULT_ANKI_CONNECT_CONFIG.url;
|
|
||||||
const proxySource =
|
const proxySource =
|
||||||
config.proxy && typeof config.proxy === 'object'
|
config.proxy && typeof config.proxy === 'object'
|
||||||
? (config.proxy as NonNullable<AnkiConnectConfig['proxy']>)
|
? (config.proxy as NonNullable<AnkiConnectConfig['proxy']>)
|
||||||
|
|||||||
@@ -169,4 +169,9 @@ test('hasExplicitCommand and shouldStartApp preserve command intent', () => {
|
|||||||
assert.equal(background.background, true);
|
assert.equal(background.background, true);
|
||||||
assert.equal(hasExplicitCommand(background), true);
|
assert.equal(hasExplicitCommand(background), true);
|
||||||
assert.equal(shouldStartApp(background), true);
|
assert.equal(shouldStartApp(background), true);
|
||||||
|
|
||||||
|
const setup = parseArgs(['--setup']);
|
||||||
|
assert.equal((setup as typeof setup & { setup?: boolean }).setup, true);
|
||||||
|
assert.equal(hasExplicitCommand(setup), true);
|
||||||
|
assert.equal(shouldStartApp(setup), true);
|
||||||
});
|
});
|
||||||
|
|||||||
@@ -5,6 +5,7 @@ export interface CliArgs {
|
|||||||
toggle: boolean;
|
toggle: boolean;
|
||||||
toggleVisibleOverlay: boolean;
|
toggleVisibleOverlay: boolean;
|
||||||
settings: boolean;
|
settings: boolean;
|
||||||
|
setup: boolean;
|
||||||
show: boolean;
|
show: boolean;
|
||||||
hide: boolean;
|
hide: boolean;
|
||||||
showVisibleOverlay: boolean;
|
showVisibleOverlay: boolean;
|
||||||
@@ -71,6 +72,7 @@ export function parseArgs(argv: string[]): CliArgs {
|
|||||||
toggle: false,
|
toggle: false,
|
||||||
toggleVisibleOverlay: false,
|
toggleVisibleOverlay: false,
|
||||||
settings: false,
|
settings: false,
|
||||||
|
setup: false,
|
||||||
show: false,
|
show: false,
|
||||||
hide: false,
|
hide: false,
|
||||||
showVisibleOverlay: false,
|
showVisibleOverlay: false,
|
||||||
@@ -125,6 +127,7 @@ export function parseArgs(argv: string[]): CliArgs {
|
|||||||
else if (arg === '--toggle') args.toggle = true;
|
else if (arg === '--toggle') args.toggle = true;
|
||||||
else if (arg === '--toggle-visible-overlay') args.toggleVisibleOverlay = true;
|
else if (arg === '--toggle-visible-overlay') args.toggleVisibleOverlay = true;
|
||||||
else if (arg === '--settings' || arg === '--yomitan') args.settings = true;
|
else if (arg === '--settings' || arg === '--yomitan') args.settings = true;
|
||||||
|
else if (arg === '--setup') args.setup = true;
|
||||||
else if (arg === '--show') args.show = true;
|
else if (arg === '--show') args.show = true;
|
||||||
else if (arg === '--hide') args.hide = true;
|
else if (arg === '--hide') args.hide = true;
|
||||||
else if (arg === '--show-visible-overlay') args.showVisibleOverlay = true;
|
else if (arg === '--show-visible-overlay') args.showVisibleOverlay = true;
|
||||||
@@ -298,6 +301,7 @@ export function hasExplicitCommand(args: CliArgs): boolean {
|
|||||||
args.toggle ||
|
args.toggle ||
|
||||||
args.toggleVisibleOverlay ||
|
args.toggleVisibleOverlay ||
|
||||||
args.settings ||
|
args.settings ||
|
||||||
|
args.setup ||
|
||||||
args.show ||
|
args.show ||
|
||||||
args.hide ||
|
args.hide ||
|
||||||
args.showVisibleOverlay ||
|
args.showVisibleOverlay ||
|
||||||
@@ -341,6 +345,7 @@ export function shouldStartApp(args: CliArgs): boolean {
|
|||||||
args.toggle ||
|
args.toggle ||
|
||||||
args.toggleVisibleOverlay ||
|
args.toggleVisibleOverlay ||
|
||||||
args.settings ||
|
args.settings ||
|
||||||
|
args.setup ||
|
||||||
args.copySubtitle ||
|
args.copySubtitle ||
|
||||||
args.copySubtitleMultiple ||
|
args.copySubtitleMultiple ||
|
||||||
args.mineSentence ||
|
args.mineSentence ||
|
||||||
@@ -371,6 +376,7 @@ export function shouldRunSettingsOnlyStartup(args: CliArgs): boolean {
|
|||||||
!args.toggleVisibleOverlay &&
|
!args.toggleVisibleOverlay &&
|
||||||
!args.show &&
|
!args.show &&
|
||||||
!args.hide &&
|
!args.hide &&
|
||||||
|
!args.setup &&
|
||||||
!args.showVisibleOverlay &&
|
!args.showVisibleOverlay &&
|
||||||
!args.hideVisibleOverlay &&
|
!args.hideVisibleOverlay &&
|
||||||
!args.copySubtitle &&
|
!args.copySubtitle &&
|
||||||
|
|||||||
@@ -18,6 +18,7 @@ test('printHelp includes configured texthooker port', () => {
|
|||||||
assert.match(output, /--help\s+Show this help/);
|
assert.match(output, /--help\s+Show this help/);
|
||||||
assert.match(output, /default: 7777/);
|
assert.match(output, /default: 7777/);
|
||||||
assert.match(output, /--refresh-known-words/);
|
assert.match(output, /--refresh-known-words/);
|
||||||
|
assert.match(output, /--setup\s+Open first-run setup window/);
|
||||||
assert.match(output, /--anilist-status/);
|
assert.match(output, /--anilist-status/);
|
||||||
assert.match(output, /--anilist-retry-queue/);
|
assert.match(output, /--anilist-retry-queue/);
|
||||||
assert.match(output, /--dictionary/);
|
assert.match(output, /--dictionary/);
|
||||||
|
|||||||
@@ -20,6 +20,7 @@ ${B}Overlay${R}
|
|||||||
--show-visible-overlay Show subtitle overlay
|
--show-visible-overlay Show subtitle overlay
|
||||||
--hide-visible-overlay Hide subtitle overlay
|
--hide-visible-overlay Hide subtitle overlay
|
||||||
--settings Open Yomitan settings window
|
--settings Open Yomitan settings window
|
||||||
|
--setup Open first-run setup window
|
||||||
--auto-start-overlay Auto-hide mpv subs, show overlay on connect
|
--auto-start-overlay Auto-hide mpv subs, show overlay on connect
|
||||||
|
|
||||||
${B}Mining${R}
|
${B}Mining${R}
|
||||||
|
|||||||
@@ -16,6 +16,9 @@ test('loads defaults when config is missing', () => {
|
|||||||
const service = new ConfigService(dir);
|
const service = new ConfigService(dir);
|
||||||
const config = service.getConfig();
|
const config = service.getConfig();
|
||||||
assert.equal(config.websocket.port, DEFAULT_CONFIG.websocket.port);
|
assert.equal(config.websocket.port, DEFAULT_CONFIG.websocket.port);
|
||||||
|
assert.equal(config.annotationWebsocket.enabled, DEFAULT_CONFIG.annotationWebsocket.enabled);
|
||||||
|
assert.equal(config.annotationWebsocket.port, DEFAULT_CONFIG.annotationWebsocket.port);
|
||||||
|
assert.equal(config.texthooker.launchAtStartup, true);
|
||||||
assert.equal(config.ankiConnect.behavior.autoUpdateNewCards, true);
|
assert.equal(config.ankiConnect.behavior.autoUpdateNewCards, true);
|
||||||
assert.deepEqual(config.ankiConnect.tags, ['SubMiner']);
|
assert.deepEqual(config.ankiConnect.tags, ['SubMiner']);
|
||||||
assert.equal(config.anilist.enabled, false);
|
assert.equal(config.anilist.enabled, false);
|
||||||
@@ -24,6 +27,9 @@ test('loads defaults when config is missing', () => {
|
|||||||
assert.equal(config.anilist.characterDictionary.maxLoaded, 3);
|
assert.equal(config.anilist.characterDictionary.maxLoaded, 3);
|
||||||
assert.equal(config.anilist.characterDictionary.evictionPolicy, 'delete');
|
assert.equal(config.anilist.characterDictionary.evictionPolicy, 'delete');
|
||||||
assert.equal(config.anilist.characterDictionary.profileScope, 'all');
|
assert.equal(config.anilist.characterDictionary.profileScope, 'all');
|
||||||
|
assert.equal(config.anilist.characterDictionary.collapsibleSections.description, false);
|
||||||
|
assert.equal(config.anilist.characterDictionary.collapsibleSections.characterInformation, false);
|
||||||
|
assert.equal(config.anilist.characterDictionary.collapsibleSections.voicedBy, false);
|
||||||
assert.equal(config.jellyfin.remoteControlEnabled, true);
|
assert.equal(config.jellyfin.remoteControlEnabled, true);
|
||||||
assert.equal(config.jellyfin.remoteControlAutoConnect, true);
|
assert.equal(config.jellyfin.remoteControlAutoConnect, true);
|
||||||
assert.equal(config.jellyfin.autoAnnounce, false);
|
assert.equal(config.jellyfin.autoAnnounce, false);
|
||||||
@@ -128,6 +134,88 @@ test('parses subtitleStyle.preserveLineBreaks and warns on invalid values', () =
|
|||||||
);
|
);
|
||||||
});
|
});
|
||||||
|
|
||||||
|
test('parses texthooker.launchAtStartup and warns on invalid values', () => {
|
||||||
|
const validDir = makeTempDir();
|
||||||
|
fs.writeFileSync(
|
||||||
|
path.join(validDir, 'config.jsonc'),
|
||||||
|
`{
|
||||||
|
"texthooker": {
|
||||||
|
"launchAtStartup": false
|
||||||
|
}
|
||||||
|
}`,
|
||||||
|
'utf-8',
|
||||||
|
);
|
||||||
|
|
||||||
|
const validService = new ConfigService(validDir);
|
||||||
|
assert.equal(validService.getConfig().texthooker.launchAtStartup, false);
|
||||||
|
|
||||||
|
const invalidDir = makeTempDir();
|
||||||
|
fs.writeFileSync(
|
||||||
|
path.join(invalidDir, 'config.jsonc'),
|
||||||
|
`{
|
||||||
|
"texthooker": {
|
||||||
|
"launchAtStartup": "yes"
|
||||||
|
}
|
||||||
|
}`,
|
||||||
|
'utf-8',
|
||||||
|
);
|
||||||
|
|
||||||
|
const invalidService = new ConfigService(invalidDir);
|
||||||
|
assert.equal(
|
||||||
|
invalidService.getConfig().texthooker.launchAtStartup,
|
||||||
|
DEFAULT_CONFIG.texthooker.launchAtStartup,
|
||||||
|
);
|
||||||
|
assert.ok(
|
||||||
|
invalidService.getWarnings().some((warning) => warning.path === 'texthooker.launchAtStartup'),
|
||||||
|
);
|
||||||
|
});
|
||||||
|
|
||||||
|
test('parses annotationWebsocket settings and warns on invalid values', () => {
|
||||||
|
const validDir = makeTempDir();
|
||||||
|
fs.writeFileSync(
|
||||||
|
path.join(validDir, 'config.jsonc'),
|
||||||
|
`{
|
||||||
|
"annotationWebsocket": {
|
||||||
|
"enabled": false,
|
||||||
|
"port": 7788
|
||||||
|
}
|
||||||
|
}`,
|
||||||
|
'utf-8',
|
||||||
|
);
|
||||||
|
|
||||||
|
const validService = new ConfigService(validDir);
|
||||||
|
assert.equal(validService.getConfig().annotationWebsocket.enabled, false);
|
||||||
|
assert.equal(validService.getConfig().annotationWebsocket.port, 7788);
|
||||||
|
|
||||||
|
const invalidDir = makeTempDir();
|
||||||
|
fs.writeFileSync(
|
||||||
|
path.join(invalidDir, 'config.jsonc'),
|
||||||
|
`{
|
||||||
|
"annotationWebsocket": {
|
||||||
|
"enabled": "yes",
|
||||||
|
"port": "bad"
|
||||||
|
}
|
||||||
|
}`,
|
||||||
|
'utf-8',
|
||||||
|
);
|
||||||
|
|
||||||
|
const invalidService = new ConfigService(invalidDir);
|
||||||
|
assert.equal(
|
||||||
|
invalidService.getConfig().annotationWebsocket.enabled,
|
||||||
|
DEFAULT_CONFIG.annotationWebsocket.enabled,
|
||||||
|
);
|
||||||
|
assert.equal(
|
||||||
|
invalidService.getConfig().annotationWebsocket.port,
|
||||||
|
DEFAULT_CONFIG.annotationWebsocket.port,
|
||||||
|
);
|
||||||
|
assert.ok(
|
||||||
|
invalidService.getWarnings().some((warning) => warning.path === 'annotationWebsocket.enabled'),
|
||||||
|
);
|
||||||
|
assert.ok(
|
||||||
|
invalidService.getWarnings().some((warning) => warning.path === 'annotationWebsocket.port'),
|
||||||
|
);
|
||||||
|
});
|
||||||
|
|
||||||
test('parses subtitleStyle.autoPauseVideoOnHover and warns on invalid values', () => {
|
test('parses subtitleStyle.autoPauseVideoOnHover and warns on invalid values', () => {
|
||||||
const validDir = makeTempDir();
|
const validDir = makeTempDir();
|
||||||
fs.writeFileSync(
|
fs.writeFileSync(
|
||||||
@@ -256,8 +344,8 @@ test('parses subtitleStyle.nameMatchColor and warns on invalid values', () => {
|
|||||||
|
|
||||||
const validService = new ConfigService(validDir);
|
const validService = new ConfigService(validDir);
|
||||||
assert.equal(
|
assert.equal(
|
||||||
((validService.getConfig().subtitleStyle as unknown as Record<string, unknown>).nameMatchColor ??
|
((validService.getConfig().subtitleStyle as unknown as Record<string, unknown>)
|
||||||
null) as string | null,
|
.nameMatchColor ?? null) as string | null,
|
||||||
'#eed49f',
|
'#eed49f',
|
||||||
);
|
);
|
||||||
|
|
||||||
@@ -279,9 +367,7 @@ test('parses subtitleStyle.nameMatchColor and warns on invalid values', () => {
|
|||||||
'#f5bde6',
|
'#f5bde6',
|
||||||
);
|
);
|
||||||
assert.ok(
|
assert.ok(
|
||||||
invalidService
|
invalidService.getWarnings().some((warning) => warning.path === 'subtitleStyle.nameMatchColor'),
|
||||||
.getWarnings()
|
|
||||||
.some((warning) => warning.path === 'subtitleStyle.nameMatchColor'),
|
|
||||||
);
|
);
|
||||||
});
|
});
|
||||||
|
|
||||||
@@ -411,10 +497,49 @@ test('parses anilist.characterDictionary config with clamping and enum validatio
|
|||||||
assert.equal(config.anilist.characterDictionary.maxLoaded, 20);
|
assert.equal(config.anilist.characterDictionary.maxLoaded, 20);
|
||||||
assert.equal(config.anilist.characterDictionary.evictionPolicy, 'delete');
|
assert.equal(config.anilist.characterDictionary.evictionPolicy, 'delete');
|
||||||
assert.equal(config.anilist.characterDictionary.profileScope, 'all');
|
assert.equal(config.anilist.characterDictionary.profileScope, 'all');
|
||||||
assert.ok(warnings.some((warning) => warning.path === 'anilist.characterDictionary.refreshTtlHours'));
|
assert.ok(
|
||||||
|
warnings.some((warning) => warning.path === 'anilist.characterDictionary.refreshTtlHours'),
|
||||||
|
);
|
||||||
assert.ok(warnings.some((warning) => warning.path === 'anilist.characterDictionary.maxLoaded'));
|
assert.ok(warnings.some((warning) => warning.path === 'anilist.characterDictionary.maxLoaded'));
|
||||||
assert.ok(warnings.some((warning) => warning.path === 'anilist.characterDictionary.evictionPolicy'));
|
assert.ok(
|
||||||
assert.ok(warnings.some((warning) => warning.path === 'anilist.characterDictionary.profileScope'));
|
warnings.some((warning) => warning.path === 'anilist.characterDictionary.evictionPolicy'),
|
||||||
|
);
|
||||||
|
assert.ok(
|
||||||
|
warnings.some((warning) => warning.path === 'anilist.characterDictionary.profileScope'),
|
||||||
|
);
|
||||||
|
});
|
||||||
|
|
||||||
|
test('parses anilist.characterDictionary.collapsibleSections booleans and warns on invalid values', () => {
|
||||||
|
const dir = makeTempDir();
|
||||||
|
fs.writeFileSync(
|
||||||
|
path.join(dir, 'config.jsonc'),
|
||||||
|
`{
|
||||||
|
"anilist": {
|
||||||
|
"characterDictionary": {
|
||||||
|
"collapsibleSections": {
|
||||||
|
"description": true,
|
||||||
|
"characterInformation": "yes",
|
||||||
|
"voicedBy": true
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}`,
|
||||||
|
'utf-8',
|
||||||
|
);
|
||||||
|
|
||||||
|
const service = new ConfigService(dir);
|
||||||
|
const config = service.getConfig();
|
||||||
|
const warnings = service.getWarnings();
|
||||||
|
|
||||||
|
assert.equal(config.anilist.characterDictionary.collapsibleSections.description, true);
|
||||||
|
assert.equal(config.anilist.characterDictionary.collapsibleSections.characterInformation, false);
|
||||||
|
assert.equal(config.anilist.characterDictionary.collapsibleSections.voicedBy, true);
|
||||||
|
assert.ok(
|
||||||
|
warnings.some(
|
||||||
|
(warning) =>
|
||||||
|
warning.path === 'anilist.characterDictionary.collapsibleSections.characterInformation',
|
||||||
|
),
|
||||||
|
);
|
||||||
});
|
});
|
||||||
|
|
||||||
test('parses jellyfin remote control fields', () => {
|
test('parses jellyfin remote control fields', () => {
|
||||||
@@ -840,6 +965,10 @@ test('warning emission order is deterministic across reloads', () => {
|
|||||||
"enabled": "sometimes",
|
"enabled": "sometimes",
|
||||||
"port": -1
|
"port": -1
|
||||||
},
|
},
|
||||||
|
"annotationWebsocket": {
|
||||||
|
"enabled": "sometimes",
|
||||||
|
"port": -1
|
||||||
|
},
|
||||||
"logging": {
|
"logging": {
|
||||||
"level": "trace"
|
"level": "trace"
|
||||||
}
|
}
|
||||||
@@ -856,7 +985,14 @@ test('warning emission order is deterministic across reloads', () => {
|
|||||||
assert.deepEqual(secondWarnings, firstWarnings);
|
assert.deepEqual(secondWarnings, firstWarnings);
|
||||||
assert.deepEqual(
|
assert.deepEqual(
|
||||||
firstWarnings.map((warning) => warning.path),
|
firstWarnings.map((warning) => warning.path),
|
||||||
['unknownFeature', 'websocket.enabled', 'websocket.port', 'logging.level'],
|
[
|
||||||
|
'unknownFeature',
|
||||||
|
'websocket.enabled',
|
||||||
|
'websocket.port',
|
||||||
|
'annotationWebsocket.enabled',
|
||||||
|
'annotationWebsocket.port',
|
||||||
|
'logging.level',
|
||||||
|
],
|
||||||
);
|
);
|
||||||
});
|
});
|
||||||
|
|
||||||
@@ -1426,8 +1562,17 @@ test('template generator includes known keys', () => {
|
|||||||
output,
|
output,
|
||||||
/"enabled": "auto",? \/\/ Built-in subtitle websocket server mode\. Values: auto \| true \| false/,
|
/"enabled": "auto",? \/\/ Built-in subtitle websocket server mode\. Values: auto \| true \| false/,
|
||||||
);
|
);
|
||||||
|
assert.match(
|
||||||
|
output,
|
||||||
|
/"enabled": true,? \/\/ Annotated subtitle websocket server enabled state\. Values: true \| false/,
|
||||||
|
);
|
||||||
|
assert.match(output, /"port": 6678,? \/\/ Annotated subtitle websocket server port\./);
|
||||||
assert.match(
|
assert.match(
|
||||||
output,
|
output,
|
||||||
/"enabled": false,? \/\/ Enable AnkiConnect integration\. Values: true \| false/,
|
/"enabled": false,? \/\/ Enable AnkiConnect integration\. Values: true \| false/,
|
||||||
);
|
);
|
||||||
|
assert.match(
|
||||||
|
output,
|
||||||
|
/"launchAtStartup": true,? \/\/ Launch texthooker server automatically when SubMiner starts\. Values: true \| false/,
|
||||||
|
);
|
||||||
});
|
});
|
||||||
|
|||||||
@@ -22,6 +22,7 @@ const {
|
|||||||
subtitlePosition,
|
subtitlePosition,
|
||||||
keybindings,
|
keybindings,
|
||||||
websocket,
|
websocket,
|
||||||
|
annotationWebsocket,
|
||||||
logging,
|
logging,
|
||||||
texthooker,
|
texthooker,
|
||||||
shortcuts,
|
shortcuts,
|
||||||
@@ -39,6 +40,7 @@ export const DEFAULT_CONFIG: ResolvedConfig = {
|
|||||||
subtitlePosition,
|
subtitlePosition,
|
||||||
keybindings,
|
keybindings,
|
||||||
websocket,
|
websocket,
|
||||||
|
annotationWebsocket,
|
||||||
logging,
|
logging,
|
||||||
texthooker,
|
texthooker,
|
||||||
ankiConnect,
|
ankiConnect,
|
||||||
|
|||||||
@@ -5,6 +5,7 @@ export const CORE_DEFAULT_CONFIG: Pick<
|
|||||||
| 'subtitlePosition'
|
| 'subtitlePosition'
|
||||||
| 'keybindings'
|
| 'keybindings'
|
||||||
| 'websocket'
|
| 'websocket'
|
||||||
|
| 'annotationWebsocket'
|
||||||
| 'logging'
|
| 'logging'
|
||||||
| 'texthooker'
|
| 'texthooker'
|
||||||
| 'shortcuts'
|
| 'shortcuts'
|
||||||
@@ -19,10 +20,15 @@ export const CORE_DEFAULT_CONFIG: Pick<
|
|||||||
enabled: 'auto',
|
enabled: 'auto',
|
||||||
port: 6677,
|
port: 6677,
|
||||||
},
|
},
|
||||||
|
annotationWebsocket: {
|
||||||
|
enabled: true,
|
||||||
|
port: 6678,
|
||||||
|
},
|
||||||
logging: {
|
logging: {
|
||||||
level: 'info',
|
level: 'info',
|
||||||
},
|
},
|
||||||
texthooker: {
|
texthooker: {
|
||||||
|
launchAtStartup: true,
|
||||||
openBrowser: true,
|
openBrowser: true,
|
||||||
},
|
},
|
||||||
shortcuts: {
|
shortcuts: {
|
||||||
|
|||||||
@@ -92,6 +92,11 @@ export const INTEGRATIONS_DEFAULT_CONFIG: Pick<
|
|||||||
maxLoaded: 3,
|
maxLoaded: 3,
|
||||||
evictionPolicy: 'delete',
|
evictionPolicy: 'delete',
|
||||||
profileScope: 'all',
|
profileScope: 'all',
|
||||||
|
collapsibleSections: {
|
||||||
|
description: false,
|
||||||
|
characterInformation: false,
|
||||||
|
voicedBy: false,
|
||||||
|
},
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
jellyfin: {
|
jellyfin: {
|
||||||
|
|||||||
@@ -18,11 +18,13 @@ test('config option registry includes critical paths and has unique entries', ()
|
|||||||
|
|
||||||
for (const requiredPath of [
|
for (const requiredPath of [
|
||||||
'logging.level',
|
'logging.level',
|
||||||
|
'annotationWebsocket.enabled',
|
||||||
'startupWarmups.lowPowerMode',
|
'startupWarmups.lowPowerMode',
|
||||||
'subtitleStyle.enableJlpt',
|
'subtitleStyle.enableJlpt',
|
||||||
'subtitleStyle.autoPauseVideoOnYomitanPopup',
|
'subtitleStyle.autoPauseVideoOnYomitanPopup',
|
||||||
'ankiConnect.enabled',
|
'ankiConnect.enabled',
|
||||||
'anilist.characterDictionary.enabled',
|
'anilist.characterDictionary.enabled',
|
||||||
|
'anilist.characterDictionary.collapsibleSections.description',
|
||||||
'immersionTracking.enabled',
|
'immersionTracking.enabled',
|
||||||
]) {
|
]) {
|
||||||
assert.ok(paths.includes(requiredPath), `missing config path: ${requiredPath}`);
|
assert.ok(paths.includes(requiredPath), `missing config path: ${requiredPath}`);
|
||||||
@@ -35,6 +37,7 @@ test('config template sections include expected domains and unique keys', () =>
|
|||||||
const keys = CONFIG_TEMPLATE_SECTIONS.map((section) => section.key);
|
const keys = CONFIG_TEMPLATE_SECTIONS.map((section) => section.key);
|
||||||
const requiredKeys: (typeof keys)[number][] = [
|
const requiredKeys: (typeof keys)[number][] = [
|
||||||
'websocket',
|
'websocket',
|
||||||
|
'annotationWebsocket',
|
||||||
'startupWarmups',
|
'startupWarmups',
|
||||||
'subtitleStyle',
|
'subtitleStyle',
|
||||||
'ankiConnect',
|
'ankiConnect',
|
||||||
|
|||||||
@@ -12,6 +12,12 @@ export function buildCoreConfigOptionRegistry(
|
|||||||
defaultValue: defaultConfig.logging.level,
|
defaultValue: defaultConfig.logging.level,
|
||||||
description: 'Minimum log level for runtime logging.',
|
description: 'Minimum log level for runtime logging.',
|
||||||
},
|
},
|
||||||
|
{
|
||||||
|
path: 'texthooker.launchAtStartup',
|
||||||
|
kind: 'boolean',
|
||||||
|
defaultValue: defaultConfig.texthooker.launchAtStartup,
|
||||||
|
description: 'Launch texthooker server automatically when SubMiner starts.',
|
||||||
|
},
|
||||||
{
|
{
|
||||||
path: 'websocket.enabled',
|
path: 'websocket.enabled',
|
||||||
kind: 'enum',
|
kind: 'enum',
|
||||||
@@ -25,6 +31,18 @@ export function buildCoreConfigOptionRegistry(
|
|||||||
defaultValue: defaultConfig.websocket.port,
|
defaultValue: defaultConfig.websocket.port,
|
||||||
description: 'Built-in subtitle websocket server port.',
|
description: 'Built-in subtitle websocket server port.',
|
||||||
},
|
},
|
||||||
|
{
|
||||||
|
path: 'annotationWebsocket.enabled',
|
||||||
|
kind: 'boolean',
|
||||||
|
defaultValue: defaultConfig.annotationWebsocket.enabled,
|
||||||
|
description: 'Annotated subtitle websocket server enabled state.',
|
||||||
|
},
|
||||||
|
{
|
||||||
|
path: 'annotationWebsocket.port',
|
||||||
|
kind: 'number',
|
||||||
|
defaultValue: defaultConfig.annotationWebsocket.port,
|
||||||
|
description: 'Annotated subtitle websocket server port.',
|
||||||
|
},
|
||||||
{
|
{
|
||||||
path: 'subsync.defaultMode',
|
path: 'subsync.defaultMode',
|
||||||
kind: 'enum',
|
kind: 'enum',
|
||||||
|
|||||||
@@ -171,6 +171,28 @@ export function buildIntegrationConfigOptionRegistry(
|
|||||||
defaultValue: defaultConfig.anilist.characterDictionary.profileScope,
|
defaultValue: defaultConfig.anilist.characterDictionary.profileScope,
|
||||||
description: 'Yomitan profile scope for dictionary enable/disable updates.',
|
description: 'Yomitan profile scope for dictionary enable/disable updates.',
|
||||||
},
|
},
|
||||||
|
{
|
||||||
|
path: 'anilist.characterDictionary.collapsibleSections.description',
|
||||||
|
kind: 'boolean',
|
||||||
|
defaultValue: defaultConfig.anilist.characterDictionary.collapsibleSections.description,
|
||||||
|
description:
|
||||||
|
'Open the Description section by default in character dictionary glossary entries.',
|
||||||
|
},
|
||||||
|
{
|
||||||
|
path: 'anilist.characterDictionary.collapsibleSections.characterInformation',
|
||||||
|
kind: 'boolean',
|
||||||
|
defaultValue:
|
||||||
|
defaultConfig.anilist.characterDictionary.collapsibleSections.characterInformation,
|
||||||
|
description:
|
||||||
|
'Open the Character Information section by default in character dictionary glossary entries.',
|
||||||
|
},
|
||||||
|
{
|
||||||
|
path: 'anilist.characterDictionary.collapsibleSections.voicedBy',
|
||||||
|
kind: 'boolean',
|
||||||
|
defaultValue: defaultConfig.anilist.characterDictionary.collapsibleSections.voicedBy,
|
||||||
|
description:
|
||||||
|
'Open the Voiced by section by default in character dictionary glossary entries.',
|
||||||
|
},
|
||||||
{
|
{
|
||||||
path: 'jellyfin.enabled',
|
path: 'jellyfin.enabled',
|
||||||
kind: 'boolean',
|
kind: 'boolean',
|
||||||
|
|||||||
@@ -10,7 +10,7 @@ const CORE_TEMPLATE_SECTIONS: ConfigTemplateSection[] = [
|
|||||||
},
|
},
|
||||||
{
|
{
|
||||||
title: 'Texthooker Server',
|
title: 'Texthooker Server',
|
||||||
description: ['Control whether browser opens automatically for texthooker.'],
|
description: ['Configure texthooker startup launch and browser opening behavior.'],
|
||||||
key: 'texthooker',
|
key: 'texthooker',
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
@@ -21,6 +21,14 @@ const CORE_TEMPLATE_SECTIONS: ConfigTemplateSection[] = [
|
|||||||
],
|
],
|
||||||
key: 'websocket',
|
key: 'websocket',
|
||||||
},
|
},
|
||||||
|
{
|
||||||
|
title: 'Annotation WebSocket',
|
||||||
|
description: [
|
||||||
|
'Dedicated annotated subtitle websocket for bundled texthooker and token-aware clients.',
|
||||||
|
'Independent from websocket.auto and defaults to port 6678.',
|
||||||
|
],
|
||||||
|
key: 'annotationWebsocket',
|
||||||
|
},
|
||||||
{
|
{
|
||||||
title: 'Logging',
|
title: 'Logging',
|
||||||
description: ['Controls logging verbosity.', 'Set to debug for full runtime diagnostics.'],
|
description: ['Controls logging verbosity.', 'Set to debug for full runtime diagnostics.'],
|
||||||
|
|||||||
@@ -5,6 +5,18 @@ export function applyCoreDomainConfig(context: ResolveContext): void {
|
|||||||
const { src, resolved, warn } = context;
|
const { src, resolved, warn } = context;
|
||||||
|
|
||||||
if (isObject(src.texthooker)) {
|
if (isObject(src.texthooker)) {
|
||||||
|
const launchAtStartup = asBoolean(src.texthooker.launchAtStartup);
|
||||||
|
if (launchAtStartup !== undefined) {
|
||||||
|
resolved.texthooker.launchAtStartup = launchAtStartup;
|
||||||
|
} else if (src.texthooker.launchAtStartup !== undefined) {
|
||||||
|
warn(
|
||||||
|
'texthooker.launchAtStartup',
|
||||||
|
src.texthooker.launchAtStartup,
|
||||||
|
resolved.texthooker.launchAtStartup,
|
||||||
|
'Expected boolean.',
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
const openBrowser = asBoolean(src.texthooker.openBrowser);
|
const openBrowser = asBoolean(src.texthooker.openBrowser);
|
||||||
if (openBrowser !== undefined) {
|
if (openBrowser !== undefined) {
|
||||||
resolved.texthooker.openBrowser = openBrowser;
|
resolved.texthooker.openBrowser = openBrowser;
|
||||||
@@ -44,6 +56,32 @@ export function applyCoreDomainConfig(context: ResolveContext): void {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
if (isObject(src.annotationWebsocket)) {
|
||||||
|
const enabled = asBoolean(src.annotationWebsocket.enabled);
|
||||||
|
if (enabled !== undefined) {
|
||||||
|
resolved.annotationWebsocket.enabled = enabled;
|
||||||
|
} else if (src.annotationWebsocket.enabled !== undefined) {
|
||||||
|
warn(
|
||||||
|
'annotationWebsocket.enabled',
|
||||||
|
src.annotationWebsocket.enabled,
|
||||||
|
resolved.annotationWebsocket.enabled,
|
||||||
|
'Expected boolean.',
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
const port = asNumber(src.annotationWebsocket.port);
|
||||||
|
if (port !== undefined && port > 0 && port <= 65535) {
|
||||||
|
resolved.annotationWebsocket.port = Math.floor(port);
|
||||||
|
} else if (src.annotationWebsocket.port !== undefined) {
|
||||||
|
warn(
|
||||||
|
'annotationWebsocket.port',
|
||||||
|
src.annotationWebsocket.port,
|
||||||
|
resolved.annotationWebsocket.port,
|
||||||
|
'Expected integer between 1 and 65535.',
|
||||||
|
);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
if (isObject(src.logging)) {
|
if (isObject(src.logging)) {
|
||||||
const logLevel = asString(src.logging.level);
|
const logLevel = asString(src.logging.level);
|
||||||
if (
|
if (
|
||||||
|
|||||||
@@ -124,6 +124,31 @@ export function applyIntegrationConfig(context: ResolveContext): void {
|
|||||||
'Expected string.',
|
'Expected string.',
|
||||||
);
|
);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
if (isObject(characterDictionary.collapsibleSections)) {
|
||||||
|
const collapsibleSections = characterDictionary.collapsibleSections;
|
||||||
|
const keys = ['description', 'characterInformation', 'voicedBy'] as const;
|
||||||
|
for (const key of keys) {
|
||||||
|
const value = asBoolean(collapsibleSections[key]);
|
||||||
|
if (value !== undefined) {
|
||||||
|
resolved.anilist.characterDictionary.collapsibleSections[key] = value;
|
||||||
|
} else if (collapsibleSections[key] !== undefined) {
|
||||||
|
warn(
|
||||||
|
`anilist.characterDictionary.collapsibleSections.${key}`,
|
||||||
|
collapsibleSections[key],
|
||||||
|
resolved.anilist.characterDictionary.collapsibleSections[key],
|
||||||
|
'Expected boolean.',
|
||||||
|
);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
} else if (characterDictionary.collapsibleSections !== undefined) {
|
||||||
|
warn(
|
||||||
|
'anilist.characterDictionary.collapsibleSections',
|
||||||
|
characterDictionary.collapsibleSections,
|
||||||
|
resolved.anilist.characterDictionary.collapsibleSections,
|
||||||
|
'Expected object.',
|
||||||
|
);
|
||||||
|
}
|
||||||
} else if (src.anilist.characterDictionary !== undefined) {
|
} else if (src.anilist.characterDictionary !== undefined) {
|
||||||
warn(
|
warn(
|
||||||
'anilist.characterDictionary',
|
'anilist.characterDictionary',
|
||||||
|
|||||||
@@ -72,6 +72,11 @@ test('anilist character dictionary fields are parsed, clamped, and enum-validate
|
|||||||
maxLoaded: 99,
|
maxLoaded: 99,
|
||||||
evictionPolicy: 'purge' as never,
|
evictionPolicy: 'purge' as never,
|
||||||
profileScope: 'global' as never,
|
profileScope: 'global' as never,
|
||||||
|
collapsibleSections: {
|
||||||
|
description: true,
|
||||||
|
characterInformation: 'invalid' as never,
|
||||||
|
voicedBy: true,
|
||||||
|
} as never,
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
});
|
});
|
||||||
@@ -83,10 +88,19 @@ test('anilist character dictionary fields are parsed, clamped, and enum-validate
|
|||||||
assert.equal(context.resolved.anilist.characterDictionary.maxLoaded, 20);
|
assert.equal(context.resolved.anilist.characterDictionary.maxLoaded, 20);
|
||||||
assert.equal(context.resolved.anilist.characterDictionary.evictionPolicy, 'delete');
|
assert.equal(context.resolved.anilist.characterDictionary.evictionPolicy, 'delete');
|
||||||
assert.equal(context.resolved.anilist.characterDictionary.profileScope, 'all');
|
assert.equal(context.resolved.anilist.characterDictionary.profileScope, 'all');
|
||||||
|
assert.equal(context.resolved.anilist.characterDictionary.collapsibleSections.description, true);
|
||||||
|
assert.equal(
|
||||||
|
context.resolved.anilist.characterDictionary.collapsibleSections.characterInformation,
|
||||||
|
false,
|
||||||
|
);
|
||||||
|
assert.equal(context.resolved.anilist.characterDictionary.collapsibleSections.voicedBy, true);
|
||||||
|
|
||||||
const warnedPaths = warnings.map((warning) => warning.path);
|
const warnedPaths = warnings.map((warning) => warning.path);
|
||||||
assert.ok(warnedPaths.includes('anilist.characterDictionary.refreshTtlHours'));
|
assert.ok(warnedPaths.includes('anilist.characterDictionary.refreshTtlHours'));
|
||||||
assert.ok(warnedPaths.includes('anilist.characterDictionary.maxLoaded'));
|
assert.ok(warnedPaths.includes('anilist.characterDictionary.maxLoaded'));
|
||||||
assert.ok(warnedPaths.includes('anilist.characterDictionary.evictionPolicy'));
|
assert.ok(warnedPaths.includes('anilist.characterDictionary.evictionPolicy'));
|
||||||
assert.ok(warnedPaths.includes('anilist.characterDictionary.profileScope'));
|
assert.ok(warnedPaths.includes('anilist.characterDictionary.profileScope'));
|
||||||
|
assert.ok(
|
||||||
|
warnedPaths.includes('anilist.characterDictionary.collapsibleSections.characterInformation'),
|
||||||
|
);
|
||||||
});
|
});
|
||||||
|
|||||||
@@ -238,7 +238,9 @@ export function applySubtitleDomainConfig(context: ResolveContext): void {
|
|||||||
);
|
);
|
||||||
if (nameMatchEnabled !== undefined) {
|
if (nameMatchEnabled !== undefined) {
|
||||||
resolved.subtitleStyle.nameMatchEnabled = nameMatchEnabled;
|
resolved.subtitleStyle.nameMatchEnabled = nameMatchEnabled;
|
||||||
} else if ((src.subtitleStyle as { nameMatchEnabled?: unknown }).nameMatchEnabled !== undefined) {
|
} else if (
|
||||||
|
(src.subtitleStyle as { nameMatchEnabled?: unknown }).nameMatchEnabled !== undefined
|
||||||
|
) {
|
||||||
resolved.subtitleStyle.nameMatchEnabled = fallbackSubtitleStyleNameMatchEnabled;
|
resolved.subtitleStyle.nameMatchEnabled = fallbackSubtitleStyleNameMatchEnabled;
|
||||||
warn(
|
warn(
|
||||||
'subtitleStyle.nameMatchEnabled',
|
'subtitleStyle.nameMatchEnabled',
|
||||||
|
|||||||
@@ -11,6 +11,7 @@ function makeArgs(overrides: Partial<CliArgs> = {}): CliArgs {
|
|||||||
toggle: false,
|
toggle: false,
|
||||||
toggleVisibleOverlay: false,
|
toggleVisibleOverlay: false,
|
||||||
settings: false,
|
settings: false,
|
||||||
|
setup: false,
|
||||||
show: false,
|
show: false,
|
||||||
hide: false,
|
hide: false,
|
||||||
showVisibleOverlay: false,
|
showVisibleOverlay: false,
|
||||||
|
|||||||
@@ -4,7 +4,8 @@ import { AppReadyRuntimeDeps, runAppReadyRuntime } from './startup';
|
|||||||
|
|
||||||
function makeDeps(overrides: Partial<AppReadyRuntimeDeps> = {}) {
|
function makeDeps(overrides: Partial<AppReadyRuntimeDeps> = {}) {
|
||||||
const calls: string[] = [];
|
const calls: string[] = [];
|
||||||
const deps: AppReadyRuntimeDeps = {
|
const deps = {
|
||||||
|
ensureDefaultConfigBootstrap: () => calls.push('ensureDefaultConfigBootstrap'),
|
||||||
loadSubtitlePosition: () => calls.push('loadSubtitlePosition'),
|
loadSubtitlePosition: () => calls.push('loadSubtitlePosition'),
|
||||||
resolveKeybindings: () => calls.push('resolveKeybindings'),
|
resolveKeybindings: () => calls.push('resolveKeybindings'),
|
||||||
createMpvClient: () => calls.push('createMpvClient'),
|
createMpvClient: () => calls.push('createMpvClient'),
|
||||||
@@ -20,8 +21,13 @@ function makeDeps(overrides: Partial<AppReadyRuntimeDeps> = {}) {
|
|||||||
setSecondarySubMode: (mode) => calls.push(`setSecondarySubMode:${mode}`),
|
setSecondarySubMode: (mode) => calls.push(`setSecondarySubMode:${mode}`),
|
||||||
defaultSecondarySubMode: 'hover',
|
defaultSecondarySubMode: 'hover',
|
||||||
defaultWebsocketPort: 9001,
|
defaultWebsocketPort: 9001,
|
||||||
|
defaultAnnotationWebsocketPort: 6678,
|
||||||
|
defaultTexthookerPort: 5174,
|
||||||
hasMpvWebsocketPlugin: () => true,
|
hasMpvWebsocketPlugin: () => true,
|
||||||
startSubtitleWebsocket: (port) => calls.push(`startSubtitleWebsocket:${port}`),
|
startSubtitleWebsocket: (port) => calls.push(`startSubtitleWebsocket:${port}`),
|
||||||
|
startAnnotationWebsocket: (port) => calls.push(`startAnnotationWebsocket:${port}`),
|
||||||
|
startTexthooker: (port, websocketUrl) =>
|
||||||
|
calls.push(`startTexthooker:${port}:${websocketUrl ?? ''}`),
|
||||||
log: (message) => calls.push(`log:${message}`),
|
log: (message) => calls.push(`log:${message}`),
|
||||||
createMecabTokenizerAndCheck: async () => {
|
createMecabTokenizerAndCheck: async () => {
|
||||||
calls.push('createMecabTokenizerAndCheck');
|
calls.push('createMecabTokenizerAndCheck');
|
||||||
@@ -34,6 +40,9 @@ function makeDeps(overrides: Partial<AppReadyRuntimeDeps> = {}) {
|
|||||||
loadYomitanExtension: async () => {
|
loadYomitanExtension: async () => {
|
||||||
calls.push('loadYomitanExtension');
|
calls.push('loadYomitanExtension');
|
||||||
},
|
},
|
||||||
|
handleFirstRunSetup: async () => {
|
||||||
|
calls.push('handleFirstRunSetup');
|
||||||
|
},
|
||||||
prewarmSubtitleDictionaries: async () => {
|
prewarmSubtitleDictionaries: async () => {
|
||||||
calls.push('prewarmSubtitleDictionaries');
|
calls.push('prewarmSubtitleDictionaries');
|
||||||
},
|
},
|
||||||
@@ -48,7 +57,7 @@ function makeDeps(overrides: Partial<AppReadyRuntimeDeps> = {}) {
|
|||||||
logDebug: (message) => calls.push(`debug:${message}`),
|
logDebug: (message) => calls.push(`debug:${message}`),
|
||||||
now: () => 1000,
|
now: () => 1000,
|
||||||
...overrides,
|
...overrides,
|
||||||
};
|
} as AppReadyRuntimeDeps;
|
||||||
return { deps, calls };
|
return { deps, calls };
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -57,7 +66,9 @@ test('runAppReadyRuntime starts websocket in auto mode when plugin missing', asy
|
|||||||
hasMpvWebsocketPlugin: () => false,
|
hasMpvWebsocketPlugin: () => false,
|
||||||
});
|
});
|
||||||
await runAppReadyRuntime(deps);
|
await runAppReadyRuntime(deps);
|
||||||
|
assert.ok(calls.includes('ensureDefaultConfigBootstrap'));
|
||||||
assert.ok(calls.includes('startSubtitleWebsocket:9001'));
|
assert.ok(calls.includes('startSubtitleWebsocket:9001'));
|
||||||
|
assert.ok(calls.includes('startAnnotationWebsocket:6678'));
|
||||||
assert.ok(calls.includes('setVisibleOverlayVisible:true'));
|
assert.ok(calls.includes('setVisibleOverlayVisible:true'));
|
||||||
assert.ok(calls.includes('initializeOverlayRuntime'));
|
assert.ok(calls.includes('initializeOverlayRuntime'));
|
||||||
assert.ok(
|
assert.ok(
|
||||||
@@ -71,6 +82,46 @@ test('runAppReadyRuntime starts websocket in auto mode when plugin missing', asy
|
|||||||
);
|
);
|
||||||
});
|
});
|
||||||
|
|
||||||
|
test('runAppReadyRuntime starts texthooker on startup when enabled in config', async () => {
|
||||||
|
const { deps, calls } = makeDeps({
|
||||||
|
getResolvedConfig: () => ({
|
||||||
|
websocket: { enabled: 'auto' },
|
||||||
|
secondarySub: {},
|
||||||
|
texthooker: { launchAtStartup: true },
|
||||||
|
}),
|
||||||
|
});
|
||||||
|
|
||||||
|
await runAppReadyRuntime(deps);
|
||||||
|
|
||||||
|
assert.ok(calls.includes('startTexthooker:5174:ws://127.0.0.1:6678'));
|
||||||
|
assert.ok(calls.indexOf('handleFirstRunSetup') < calls.indexOf('handleInitialArgs'));
|
||||||
|
assert.ok(
|
||||||
|
calls.indexOf('createMpvClient') < calls.indexOf('startTexthooker:5174:ws://127.0.0.1:6678'),
|
||||||
|
);
|
||||||
|
assert.ok(
|
||||||
|
calls.indexOf('startTexthooker:5174:ws://127.0.0.1:6678') < calls.indexOf('handleInitialArgs'),
|
||||||
|
);
|
||||||
|
});
|
||||||
|
|
||||||
|
test('runAppReadyRuntime keeps annotation websocket enabled when regular websocket auto-skips', async () => {
|
||||||
|
const { deps, calls } = makeDeps({
|
||||||
|
getResolvedConfig: () => ({
|
||||||
|
websocket: { enabled: 'auto' },
|
||||||
|
annotationWebsocket: { enabled: true, port: 6678 },
|
||||||
|
secondarySub: {},
|
||||||
|
texthooker: { launchAtStartup: true },
|
||||||
|
}),
|
||||||
|
hasMpvWebsocketPlugin: () => true,
|
||||||
|
});
|
||||||
|
|
||||||
|
await runAppReadyRuntime(deps);
|
||||||
|
|
||||||
|
assert.equal(calls.includes('startSubtitleWebsocket:9001'), false);
|
||||||
|
assert.ok(calls.includes('startAnnotationWebsocket:6678'));
|
||||||
|
assert.ok(calls.includes('startTexthooker:5174:ws://127.0.0.1:6678'));
|
||||||
|
assert.ok(calls.includes('log:mpv_websocket detected, skipping built-in WebSocket server'));
|
||||||
|
});
|
||||||
|
|
||||||
test('runAppReadyRuntime skips heavy startup when shouldSkipHeavyStartup returns true', async () => {
|
test('runAppReadyRuntime skips heavy startup when shouldSkipHeavyStartup returns true', async () => {
|
||||||
const { deps, calls } = makeDeps({
|
const { deps, calls } = makeDeps({
|
||||||
shouldSkipHeavyStartup: () => true,
|
shouldSkipHeavyStartup: () => true,
|
||||||
@@ -102,6 +153,7 @@ test('runAppReadyRuntime skips heavy startup when shouldSkipHeavyStartup returns
|
|||||||
|
|
||||||
await runAppReadyRuntime(deps);
|
await runAppReadyRuntime(deps);
|
||||||
|
|
||||||
|
assert.equal(calls.includes('ensureDefaultConfigBootstrap'), true);
|
||||||
assert.equal(calls.includes('reloadConfig'), false);
|
assert.equal(calls.includes('reloadConfig'), false);
|
||||||
assert.equal(calls.includes('getResolvedConfig'), false);
|
assert.equal(calls.includes('getResolvedConfig'), false);
|
||||||
assert.equal(calls.includes('getConfigWarnings'), false);
|
assert.equal(calls.includes('getConfigWarnings'), false);
|
||||||
@@ -116,7 +168,10 @@ test('runAppReadyRuntime skips heavy startup when shouldSkipHeavyStartup returns
|
|||||||
assert.equal(calls.includes('logConfigWarning'), false);
|
assert.equal(calls.includes('logConfigWarning'), false);
|
||||||
assert.equal(calls.includes('handleInitialArgs'), true);
|
assert.equal(calls.includes('handleInitialArgs'), true);
|
||||||
assert.equal(calls.includes('loadYomitanExtension'), true);
|
assert.equal(calls.includes('loadYomitanExtension'), true);
|
||||||
|
assert.equal(calls.includes('handleFirstRunSetup'), true);
|
||||||
assert.ok(calls.indexOf('loadYomitanExtension') < calls.indexOf('handleInitialArgs'));
|
assert.ok(calls.indexOf('loadYomitanExtension') < calls.indexOf('handleInitialArgs'));
|
||||||
|
assert.ok(calls.indexOf('loadYomitanExtension') < calls.indexOf('handleFirstRunSetup'));
|
||||||
|
assert.ok(calls.indexOf('handleFirstRunSetup') < calls.indexOf('handleInitialArgs'));
|
||||||
});
|
});
|
||||||
|
|
||||||
test('runAppReadyRuntime skips Jellyfin remote startup when dependency is not wired', async () => {
|
test('runAppReadyRuntime skips Jellyfin remote startup when dependency is not wired', async () => {
|
||||||
|
|||||||
@@ -11,6 +11,7 @@ function makeArgs(overrides: Partial<CliArgs> = {}): CliArgs {
|
|||||||
toggle: false,
|
toggle: false,
|
||||||
toggleVisibleOverlay: false,
|
toggleVisibleOverlay: false,
|
||||||
settings: false,
|
settings: false,
|
||||||
|
setup: false,
|
||||||
show: false,
|
show: false,
|
||||||
hide: false,
|
hide: false,
|
||||||
showVisibleOverlay: false,
|
showVisibleOverlay: false,
|
||||||
@@ -96,6 +97,9 @@ function createDeps(overrides: Partial<CliCommandServiceDeps> = {}) {
|
|||||||
openYomitanSettingsDelayed: (delayMs) => {
|
openYomitanSettingsDelayed: (delayMs) => {
|
||||||
calls.push(`openYomitanSettingsDelayed:${delayMs}`);
|
calls.push(`openYomitanSettingsDelayed:${delayMs}`);
|
||||||
},
|
},
|
||||||
|
openFirstRunSetup: () => {
|
||||||
|
calls.push('openFirstRunSetup');
|
||||||
|
},
|
||||||
setVisibleOverlayVisible: (visible) => {
|
setVisibleOverlayVisible: (visible) => {
|
||||||
calls.push(`setVisibleOverlayVisible:${visible}`);
|
calls.push(`setVisibleOverlayVisible:${visible}`);
|
||||||
},
|
},
|
||||||
@@ -229,6 +233,16 @@ test('handleCliCommand processes --start for second-instance when overlay runtim
|
|||||||
);
|
);
|
||||||
});
|
});
|
||||||
|
|
||||||
|
test('handleCliCommand opens first-run setup window for --setup', () => {
|
||||||
|
const { deps, calls } = createDeps();
|
||||||
|
|
||||||
|
handleCliCommand(makeArgs({ setup: true }), 'initial', deps);
|
||||||
|
|
||||||
|
assert.ok(calls.includes('openFirstRunSetup'));
|
||||||
|
assert.ok(calls.includes('log:Opened first-run setup flow.'));
|
||||||
|
assert.equal(calls.includes('openYomitanSettingsDelayed:1000'), false);
|
||||||
|
});
|
||||||
|
|
||||||
test('handleCliCommand applies cli log level for second-instance commands', () => {
|
test('handleCliCommand applies cli log level for second-instance commands', () => {
|
||||||
const { deps, calls } = createDeps({
|
const { deps, calls } = createDeps({
|
||||||
setLogLevel: (level) => {
|
setLogLevel: (level) => {
|
||||||
|
|||||||
@@ -17,6 +17,7 @@ export interface CliCommandServiceDeps {
|
|||||||
isOverlayRuntimeInitialized: () => boolean;
|
isOverlayRuntimeInitialized: () => boolean;
|
||||||
initializeOverlayRuntime: () => void;
|
initializeOverlayRuntime: () => void;
|
||||||
toggleVisibleOverlay: () => void;
|
toggleVisibleOverlay: () => void;
|
||||||
|
openFirstRunSetup: () => void;
|
||||||
openYomitanSettingsDelayed: (delayMs: number) => void;
|
openYomitanSettingsDelayed: (delayMs: number) => void;
|
||||||
setVisibleOverlayVisible: (visible: boolean) => void;
|
setVisibleOverlayVisible: (visible: boolean) => void;
|
||||||
copyCurrentSubtitle: () => void;
|
copyCurrentSubtitle: () => void;
|
||||||
@@ -115,6 +116,7 @@ interface MiningCliRuntime {
|
|||||||
}
|
}
|
||||||
|
|
||||||
interface UiCliRuntime {
|
interface UiCliRuntime {
|
||||||
|
openFirstRunSetup: () => void;
|
||||||
openYomitanSettings: () => void;
|
openYomitanSettings: () => void;
|
||||||
cycleSecondarySubMode: () => void;
|
cycleSecondarySubMode: () => void;
|
||||||
openRuntimeOptionsPalette: () => void;
|
openRuntimeOptionsPalette: () => void;
|
||||||
@@ -195,6 +197,7 @@ export function createCliCommandDepsRuntime(
|
|||||||
isOverlayRuntimeInitialized: options.overlay.isInitialized,
|
isOverlayRuntimeInitialized: options.overlay.isInitialized,
|
||||||
initializeOverlayRuntime: options.overlay.initialize,
|
initializeOverlayRuntime: options.overlay.initialize,
|
||||||
toggleVisibleOverlay: options.overlay.toggleVisible,
|
toggleVisibleOverlay: options.overlay.toggleVisible,
|
||||||
|
openFirstRunSetup: options.ui.openFirstRunSetup,
|
||||||
openYomitanSettingsDelayed: (delayMs) => {
|
openYomitanSettingsDelayed: (delayMs) => {
|
||||||
options.schedule(() => {
|
options.schedule(() => {
|
||||||
options.ui.openYomitanSettings();
|
options.ui.openYomitanSettings();
|
||||||
@@ -258,7 +261,8 @@ export function handleCliCommand(
|
|||||||
|
|
||||||
const ignoreSecondInstanceStart =
|
const ignoreSecondInstanceStart =
|
||||||
source === 'second-instance' && args.start && deps.isOverlayRuntimeInitialized();
|
source === 'second-instance' && args.start && deps.isOverlayRuntimeInitialized();
|
||||||
const shouldStart = (!ignoreSecondInstanceStart && args.start) || args.toggle || args.toggleVisibleOverlay;
|
const shouldStart =
|
||||||
|
(!ignoreSecondInstanceStart && args.start) || args.toggle || args.toggleVisibleOverlay;
|
||||||
const needsOverlayRuntime = commandNeedsOverlayRuntime(args);
|
const needsOverlayRuntime = commandNeedsOverlayRuntime(args);
|
||||||
const shouldInitializeOverlayRuntime = needsOverlayRuntime || args.start;
|
const shouldInitializeOverlayRuntime = needsOverlayRuntime || args.start;
|
||||||
|
|
||||||
@@ -298,6 +302,9 @@ export function handleCliCommand(
|
|||||||
|
|
||||||
if (args.toggle || args.toggleVisibleOverlay) {
|
if (args.toggle || args.toggleVisibleOverlay) {
|
||||||
deps.toggleVisibleOverlay();
|
deps.toggleVisibleOverlay();
|
||||||
|
} else if (args.setup) {
|
||||||
|
deps.openFirstRunSetup();
|
||||||
|
deps.log('Opened first-run setup flow.');
|
||||||
} else if (args.settings) {
|
} else if (args.settings) {
|
||||||
deps.openYomitanSettingsDelayed(1000);
|
deps.openYomitanSettingsDelayed(1000);
|
||||||
} else if (args.show || args.showVisibleOverlay) {
|
} else if (args.show || args.showVisibleOverlay) {
|
||||||
|
|||||||
@@ -38,6 +38,7 @@ function createOptions(overrides: Partial<Parameters<typeof handleMpvCommandFrom
|
|||||||
mpvSendCommand: (command) => {
|
mpvSendCommand: (command) => {
|
||||||
sentCommands.push(command);
|
sentCommands.push(command);
|
||||||
},
|
},
|
||||||
|
resolveProxyCommandOsd: async () => null,
|
||||||
isMpvConnected: () => true,
|
isMpvConnected: () => true,
|
||||||
hasRuntimeOptionsManager: () => true,
|
hasRuntimeOptionsManager: () => true,
|
||||||
...overrides,
|
...overrides,
|
||||||
@@ -52,30 +53,39 @@ test('handleMpvCommandFromIpc forwards regular mpv commands', () => {
|
|||||||
assert.deepEqual(osd, []);
|
assert.deepEqual(osd, []);
|
||||||
});
|
});
|
||||||
|
|
||||||
test('handleMpvCommandFromIpc emits osd for subtitle position keybinding proxies', () => {
|
test('handleMpvCommandFromIpc emits osd for subtitle position keybinding proxies', async () => {
|
||||||
const { options, sentCommands, osd } = createOptions();
|
const { options, sentCommands, osd } = createOptions();
|
||||||
handleMpvCommandFromIpc(['add', 'sub-pos', 1], options);
|
handleMpvCommandFromIpc(['add', 'sub-pos', 1], options);
|
||||||
|
await new Promise((resolve) => setImmediate(resolve));
|
||||||
assert.deepEqual(sentCommands, [['add', 'sub-pos', 1]]);
|
assert.deepEqual(sentCommands, [['add', 'sub-pos', 1]]);
|
||||||
assert.deepEqual(osd, ['Subtitle position: ${sub-pos}']);
|
assert.deepEqual(osd, ['Subtitle position: ${sub-pos}']);
|
||||||
});
|
});
|
||||||
|
|
||||||
test('handleMpvCommandFromIpc emits osd for primary subtitle track keybinding proxies', () => {
|
test('handleMpvCommandFromIpc emits resolved osd for primary subtitle track keybinding proxies', async () => {
|
||||||
const { options, sentCommands, osd } = createOptions();
|
const { options, sentCommands, osd } = createOptions({
|
||||||
|
resolveProxyCommandOsd: async () => 'Subtitle track: Internal #3 - Japanese (active)',
|
||||||
|
});
|
||||||
handleMpvCommandFromIpc(['cycle', 'sid'], options);
|
handleMpvCommandFromIpc(['cycle', 'sid'], options);
|
||||||
|
await new Promise((resolve) => setImmediate(resolve));
|
||||||
assert.deepEqual(sentCommands, [['cycle', 'sid']]);
|
assert.deepEqual(sentCommands, [['cycle', 'sid']]);
|
||||||
assert.deepEqual(osd, ['Subtitle track: ${sid}']);
|
assert.deepEqual(osd, ['Subtitle track: Internal #3 - Japanese (active)']);
|
||||||
});
|
});
|
||||||
|
|
||||||
test('handleMpvCommandFromIpc emits osd for secondary subtitle track keybinding proxies', () => {
|
test('handleMpvCommandFromIpc emits resolved osd for secondary subtitle track keybinding proxies', async () => {
|
||||||
const { options, sentCommands, osd } = createOptions();
|
const { options, sentCommands, osd } = createOptions({
|
||||||
|
resolveProxyCommandOsd: async () =>
|
||||||
|
'Secondary subtitle track: External #8 - English Commentary',
|
||||||
|
});
|
||||||
handleMpvCommandFromIpc(['set_property', 'secondary-sid', 'auto'], options);
|
handleMpvCommandFromIpc(['set_property', 'secondary-sid', 'auto'], options);
|
||||||
|
await new Promise((resolve) => setImmediate(resolve));
|
||||||
assert.deepEqual(sentCommands, [['set_property', 'secondary-sid', 'auto']]);
|
assert.deepEqual(sentCommands, [['set_property', 'secondary-sid', 'auto']]);
|
||||||
assert.deepEqual(osd, ['Secondary subtitle track: ${secondary-sid}']);
|
assert.deepEqual(osd, ['Secondary subtitle track: External #8 - English Commentary']);
|
||||||
});
|
});
|
||||||
|
|
||||||
test('handleMpvCommandFromIpc emits osd for subtitle delay keybinding proxies', () => {
|
test('handleMpvCommandFromIpc emits osd for subtitle delay keybinding proxies', async () => {
|
||||||
const { options, sentCommands, osd } = createOptions();
|
const { options, sentCommands, osd } = createOptions();
|
||||||
handleMpvCommandFromIpc(['add', 'sub-delay', 0.1], options);
|
handleMpvCommandFromIpc(['add', 'sub-delay', 0.1], options);
|
||||||
|
await new Promise((resolve) => setImmediate(resolve));
|
||||||
assert.deepEqual(sentCommands, [['add', 'sub-delay', 0.1]]);
|
assert.deepEqual(sentCommands, [['add', 'sub-delay', 0.1]]);
|
||||||
assert.deepEqual(osd, ['Subtitle delay: ${sub-delay}']);
|
assert.deepEqual(osd, ['Subtitle delay: ${sub-delay}']);
|
||||||
});
|
});
|
||||||
|
|||||||
@@ -23,6 +23,7 @@ export interface HandleMpvCommandFromIpcOptions {
|
|||||||
mpvPlayNextSubtitle: () => void;
|
mpvPlayNextSubtitle: () => void;
|
||||||
shiftSubDelayToAdjacentSubtitle: (direction: 'next' | 'previous') => Promise<void>;
|
shiftSubDelayToAdjacentSubtitle: (direction: 'next' | 'previous') => Promise<void>;
|
||||||
mpvSendCommand: (command: (string | number)[]) => void;
|
mpvSendCommand: (command: (string | number)[]) => void;
|
||||||
|
resolveProxyCommandOsd?: (command: (string | number)[]) => Promise<string | null>;
|
||||||
isMpvConnected: () => boolean;
|
isMpvConnected: () => boolean;
|
||||||
hasRuntimeOptionsManager: () => boolean;
|
hasRuntimeOptionsManager: () => boolean;
|
||||||
}
|
}
|
||||||
@@ -36,7 +37,7 @@ const MPV_PROPERTY_COMMANDS = new Set([
|
|||||||
'multiply',
|
'multiply',
|
||||||
]);
|
]);
|
||||||
|
|
||||||
function resolveProxyCommandOsd(command: (string | number)[]): string | null {
|
function resolveProxyCommandOsdTemplate(command: (string | number)[]): string | null {
|
||||||
const operation = typeof command[0] === 'string' ? command[0] : '';
|
const operation = typeof command[0] === 'string' ? command[0] : '';
|
||||||
const property = typeof command[1] === 'string' ? command[1] : '';
|
const property = typeof command[1] === 'string' ? command[1] : '';
|
||||||
if (!MPV_PROPERTY_COMMANDS.has(operation)) return null;
|
if (!MPV_PROPERTY_COMMANDS.has(operation)) return null;
|
||||||
@@ -55,6 +56,25 @@ function resolveProxyCommandOsd(command: (string | number)[]): string | null {
|
|||||||
return null;
|
return null;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
function showResolvedProxyCommandOsd(
|
||||||
|
command: (string | number)[],
|
||||||
|
options: HandleMpvCommandFromIpcOptions,
|
||||||
|
): void {
|
||||||
|
const template = resolveProxyCommandOsdTemplate(command);
|
||||||
|
if (!template) return;
|
||||||
|
|
||||||
|
const emit = async () => {
|
||||||
|
try {
|
||||||
|
const resolved = await options.resolveProxyCommandOsd?.(command);
|
||||||
|
options.showMpvOsd(resolved || template);
|
||||||
|
} catch {
|
||||||
|
options.showMpvOsd(template);
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
void emit();
|
||||||
|
}
|
||||||
|
|
||||||
export function handleMpvCommandFromIpc(
|
export function handleMpvCommandFromIpc(
|
||||||
command: (string | number)[],
|
command: (string | number)[],
|
||||||
options: HandleMpvCommandFromIpcOptions,
|
options: HandleMpvCommandFromIpcOptions,
|
||||||
@@ -103,10 +123,7 @@ export function handleMpvCommandFromIpc(
|
|||||||
options.mpvPlayNextSubtitle();
|
options.mpvPlayNextSubtitle();
|
||||||
} else {
|
} else {
|
||||||
options.mpvSendCommand(command);
|
options.mpvSendCommand(command);
|
||||||
const osd = resolveProxyCommandOsd(command);
|
showResolvedProxyCommandOsd(command, options);
|
||||||
if (osd) {
|
|
||||||
options.showMpvOsd(osd);
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -22,6 +22,22 @@ test('showMpvOsdRuntime sends show-text when connected', () => {
|
|||||||
assert.deepEqual(commands, [['show-text', 'hello', '3000']]);
|
assert.deepEqual(commands, [['show-text', 'hello', '3000']]);
|
||||||
});
|
});
|
||||||
|
|
||||||
|
test('showMpvOsdRuntime enables property expansion for placeholder-based messages', () => {
|
||||||
|
const commands: (string | number)[][] = [];
|
||||||
|
showMpvOsdRuntime(
|
||||||
|
{
|
||||||
|
connected: true,
|
||||||
|
send: ({ command }) => {
|
||||||
|
commands.push(command);
|
||||||
|
},
|
||||||
|
},
|
||||||
|
'Subtitle delay: ${sub-delay}',
|
||||||
|
);
|
||||||
|
assert.deepEqual(commands, [
|
||||||
|
['expand-properties', 'show-text', 'Subtitle delay: ${sub-delay}', '3000'],
|
||||||
|
]);
|
||||||
|
});
|
||||||
|
|
||||||
test('showMpvOsdRuntime logs fallback when disconnected', () => {
|
test('showMpvOsdRuntime logs fallback when disconnected', () => {
|
||||||
const logs: string[] = [];
|
const logs: string[] = [];
|
||||||
showMpvOsdRuntime(
|
showMpvOsdRuntime(
|
||||||
|
|||||||
@@ -53,7 +53,10 @@ export function showMpvOsdRuntime(
|
|||||||
fallbackLog: (text: string) => void = (line) => logger.info(line),
|
fallbackLog: (text: string) => void = (line) => logger.info(line),
|
||||||
): void {
|
): void {
|
||||||
if (mpvClient && mpvClient.connected) {
|
if (mpvClient && mpvClient.connected) {
|
||||||
mpvClient.send({ command: ['show-text', text, '3000'] });
|
const command = text.includes('${')
|
||||||
|
? ['expand-properties', 'show-text', text, '3000']
|
||||||
|
: ['show-text', text, '3000'];
|
||||||
|
mpvClient.send({ command });
|
||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
fallbackLog(`OSD (MPV not connected): ${text}`);
|
fallbackLog(`OSD (MPV not connected): ${text}`);
|
||||||
|
|||||||
@@ -11,6 +11,7 @@ function makeArgs(overrides: Partial<CliArgs> = {}): CliArgs {
|
|||||||
toggle: false,
|
toggle: false,
|
||||||
toggleVisibleOverlay: false,
|
toggleVisibleOverlay: false,
|
||||||
settings: false,
|
settings: false,
|
||||||
|
setup: false,
|
||||||
show: false,
|
show: false,
|
||||||
hide: false,
|
hide: false,
|
||||||
showVisibleOverlay: false,
|
showVisibleOverlay: false,
|
||||||
|
|||||||
@@ -69,6 +69,13 @@ export function runStartupBootstrapRuntime(
|
|||||||
}
|
}
|
||||||
|
|
||||||
interface AppReadyConfigLike {
|
interface AppReadyConfigLike {
|
||||||
|
annotationWebsocket?: {
|
||||||
|
enabled?: boolean;
|
||||||
|
port?: number;
|
||||||
|
};
|
||||||
|
texthooker?: {
|
||||||
|
launchAtStartup?: boolean;
|
||||||
|
};
|
||||||
secondarySub?: {
|
secondarySub?: {
|
||||||
defaultMode?: SecondarySubMode;
|
defaultMode?: SecondarySubMode;
|
||||||
};
|
};
|
||||||
@@ -92,6 +99,7 @@ interface AppReadyConfigLike {
|
|||||||
}
|
}
|
||||||
|
|
||||||
export interface AppReadyRuntimeDeps {
|
export interface AppReadyRuntimeDeps {
|
||||||
|
ensureDefaultConfigBootstrap: () => void;
|
||||||
loadSubtitlePosition: () => void;
|
loadSubtitlePosition: () => void;
|
||||||
resolveKeybindings: () => void;
|
resolveKeybindings: () => void;
|
||||||
createMpvClient: () => void;
|
createMpvClient: () => void;
|
||||||
@@ -104,14 +112,19 @@ export interface AppReadyRuntimeDeps {
|
|||||||
setSecondarySubMode: (mode: SecondarySubMode) => void;
|
setSecondarySubMode: (mode: SecondarySubMode) => void;
|
||||||
defaultSecondarySubMode: SecondarySubMode;
|
defaultSecondarySubMode: SecondarySubMode;
|
||||||
defaultWebsocketPort: number;
|
defaultWebsocketPort: number;
|
||||||
|
defaultAnnotationWebsocketPort: number;
|
||||||
|
defaultTexthookerPort: number;
|
||||||
hasMpvWebsocketPlugin: () => boolean;
|
hasMpvWebsocketPlugin: () => boolean;
|
||||||
startSubtitleWebsocket: (port: number) => void;
|
startSubtitleWebsocket: (port: number) => void;
|
||||||
|
startAnnotationWebsocket: (port: number) => void;
|
||||||
|
startTexthooker: (port: number, websocketUrl?: string) => void;
|
||||||
log: (message: string) => void;
|
log: (message: string) => void;
|
||||||
createMecabTokenizerAndCheck: () => Promise<void>;
|
createMecabTokenizerAndCheck: () => Promise<void>;
|
||||||
createSubtitleTimingTracker: () => void;
|
createSubtitleTimingTracker: () => void;
|
||||||
createImmersionTracker?: () => void;
|
createImmersionTracker?: () => void;
|
||||||
startJellyfinRemoteSession?: () => Promise<void>;
|
startJellyfinRemoteSession?: () => Promise<void>;
|
||||||
loadYomitanExtension: () => Promise<void>;
|
loadYomitanExtension: () => Promise<void>;
|
||||||
|
handleFirstRunSetup: () => Promise<void>;
|
||||||
prewarmSubtitleDictionaries?: () => Promise<void>;
|
prewarmSubtitleDictionaries?: () => Promise<void>;
|
||||||
startBackgroundWarmups: () => void;
|
startBackgroundWarmups: () => void;
|
||||||
texthookerOnlyMode: boolean;
|
texthookerOnlyMode: boolean;
|
||||||
@@ -169,8 +182,10 @@ export function isAutoUpdateEnabledRuntime(
|
|||||||
export async function runAppReadyRuntime(deps: AppReadyRuntimeDeps): Promise<void> {
|
export async function runAppReadyRuntime(deps: AppReadyRuntimeDeps): Promise<void> {
|
||||||
const now = deps.now ?? (() => Date.now());
|
const now = deps.now ?? (() => Date.now());
|
||||||
const startupStartedAtMs = now();
|
const startupStartedAtMs = now();
|
||||||
|
deps.ensureDefaultConfigBootstrap();
|
||||||
if (deps.shouldSkipHeavyStartup?.()) {
|
if (deps.shouldSkipHeavyStartup?.()) {
|
||||||
await deps.loadYomitanExtension();
|
await deps.loadYomitanExtension();
|
||||||
|
await deps.handleFirstRunSetup();
|
||||||
deps.handleInitialArgs();
|
deps.handleInitialArgs();
|
||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
@@ -179,6 +194,7 @@ export async function runAppReadyRuntime(deps: AppReadyRuntimeDeps): Promise<voi
|
|||||||
|
|
||||||
if (deps.shouldSkipHeavyStartup?.()) {
|
if (deps.shouldSkipHeavyStartup?.()) {
|
||||||
await deps.loadYomitanExtension();
|
await deps.loadYomitanExtension();
|
||||||
|
await deps.handleFirstRunSetup();
|
||||||
deps.handleInitialArgs();
|
deps.handleInitialArgs();
|
||||||
deps.logDebug?.(`App-ready critical path finished in ${now() - startupStartedAtMs}ms.`);
|
deps.logDebug?.(`App-ready critical path finished in ${now() - startupStartedAtMs}ms.`);
|
||||||
return;
|
return;
|
||||||
@@ -210,6 +226,11 @@ export async function runAppReadyRuntime(deps: AppReadyRuntimeDeps): Promise<voi
|
|||||||
const wsConfig = config.websocket || {};
|
const wsConfig = config.websocket || {};
|
||||||
const wsEnabled = wsConfig.enabled ?? 'auto';
|
const wsEnabled = wsConfig.enabled ?? 'auto';
|
||||||
const wsPort = wsConfig.port || deps.defaultWebsocketPort;
|
const wsPort = wsConfig.port || deps.defaultWebsocketPort;
|
||||||
|
const annotationWsConfig = config.annotationWebsocket || {};
|
||||||
|
const annotationWsEnabled = annotationWsConfig.enabled !== false;
|
||||||
|
const annotationWsPort = annotationWsConfig.port || deps.defaultAnnotationWebsocketPort;
|
||||||
|
const texthookerPort = deps.defaultTexthookerPort;
|
||||||
|
let texthookerWebsocketUrl: string | undefined;
|
||||||
|
|
||||||
if (wsEnabled === true || (wsEnabled === 'auto' && !deps.hasMpvWebsocketPlugin())) {
|
if (wsEnabled === true || (wsEnabled === 'auto' && !deps.hasMpvWebsocketPlugin())) {
|
||||||
deps.startSubtitleWebsocket(wsPort);
|
deps.startSubtitleWebsocket(wsPort);
|
||||||
@@ -217,6 +238,17 @@ export async function runAppReadyRuntime(deps: AppReadyRuntimeDeps): Promise<voi
|
|||||||
deps.log('mpv_websocket detected, skipping built-in WebSocket server');
|
deps.log('mpv_websocket detected, skipping built-in WebSocket server');
|
||||||
}
|
}
|
||||||
|
|
||||||
|
if (annotationWsEnabled) {
|
||||||
|
deps.startAnnotationWebsocket(annotationWsPort);
|
||||||
|
texthookerWebsocketUrl = `ws://127.0.0.1:${annotationWsPort}`;
|
||||||
|
} else if (wsEnabled === true || (wsEnabled === 'auto' && !deps.hasMpvWebsocketPlugin())) {
|
||||||
|
texthookerWebsocketUrl = `ws://127.0.0.1:${wsPort}`;
|
||||||
|
}
|
||||||
|
|
||||||
|
if (config.texthooker?.launchAtStartup !== false) {
|
||||||
|
deps.startTexthooker(texthookerPort, texthookerWebsocketUrl);
|
||||||
|
}
|
||||||
|
|
||||||
deps.createSubtitleTimingTracker();
|
deps.createSubtitleTimingTracker();
|
||||||
if (deps.createImmersionTracker) {
|
if (deps.createImmersionTracker) {
|
||||||
deps.log('Runtime ready: immersion tracker startup deferred until first media activity.');
|
deps.log('Runtime ready: immersion tracker startup deferred until first media activity.');
|
||||||
@@ -233,6 +265,8 @@ export async function runAppReadyRuntime(deps: AppReadyRuntimeDeps): Promise<voi
|
|||||||
deps.log('Overlay runtime deferred: waiting for explicit overlay command.');
|
deps.log('Overlay runtime deferred: waiting for explicit overlay command.');
|
||||||
}
|
}
|
||||||
|
|
||||||
|
await deps.loadYomitanExtension();
|
||||||
|
await deps.handleFirstRunSetup();
|
||||||
deps.handleInitialArgs();
|
deps.handleInitialArgs();
|
||||||
deps.logDebug?.(`App-ready critical path finished in ${now() - startupStartedAtMs}ms.`);
|
deps.logDebug?.(`App-ready critical path finished in ${now() - startupStartedAtMs}ms.`);
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -1,6 +1,10 @@
|
|||||||
import test from 'node:test';
|
import test from 'node:test';
|
||||||
import assert from 'node:assert/strict';
|
import assert from 'node:assert/strict';
|
||||||
import { serializeSubtitleMarkup, serializeSubtitleWebsocketMessage } from './subtitle-ws';
|
import {
|
||||||
|
serializeInitialSubtitleWebsocketMessage,
|
||||||
|
serializeSubtitleMarkup,
|
||||||
|
serializeSubtitleWebsocketMessage,
|
||||||
|
} from './subtitle-ws';
|
||||||
import { PartOfSpeech, type SubtitleData } from '../../types';
|
import { PartOfSpeech, type SubtitleData } from '../../types';
|
||||||
|
|
||||||
const frequencyOptions = {
|
const frequencyOptions = {
|
||||||
@@ -78,6 +82,51 @@ test('serializeSubtitleMarkup includes known, n+1, jlpt, and frequency classes',
|
|||||||
assert.match(markup, /word word-frequency-band-1/);
|
assert.match(markup, /word word-frequency-band-1/);
|
||||||
});
|
});
|
||||||
|
|
||||||
|
test('serializeSubtitleMarkup preserves tooltip attrs and name-match precedence', () => {
|
||||||
|
const payload: SubtitleData = {
|
||||||
|
text: 'ignored',
|
||||||
|
tokens: [
|
||||||
|
{
|
||||||
|
surface: '無事',
|
||||||
|
reading: 'ぶじ',
|
||||||
|
headword: '無事',
|
||||||
|
startPos: 0,
|
||||||
|
endPos: 2,
|
||||||
|
partOfSpeech: PartOfSpeech.other,
|
||||||
|
isMerged: false,
|
||||||
|
isKnown: true,
|
||||||
|
isNPlusOneTarget: false,
|
||||||
|
jlptLevel: 'N2',
|
||||||
|
frequencyRank: 745,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
surface: 'アレクシア',
|
||||||
|
reading: 'あれくしあ',
|
||||||
|
headword: 'アレクシア',
|
||||||
|
startPos: 2,
|
||||||
|
endPos: 7,
|
||||||
|
partOfSpeech: PartOfSpeech.other,
|
||||||
|
isMerged: false,
|
||||||
|
isKnown: false,
|
||||||
|
isNPlusOneTarget: false,
|
||||||
|
isNameMatch: true,
|
||||||
|
frequencyRank: 12,
|
||||||
|
},
|
||||||
|
],
|
||||||
|
};
|
||||||
|
|
||||||
|
const markup = serializeSubtitleMarkup(payload, frequencyOptions);
|
||||||
|
assert.match(
|
||||||
|
markup,
|
||||||
|
/<span class="word word-known word-jlpt-n2" data-reading="ぶじ" data-headword="無事" data-frequency-rank="745" data-jlpt-level="N2">無事<\/span>/,
|
||||||
|
);
|
||||||
|
assert.match(
|
||||||
|
markup,
|
||||||
|
/<span class="word word-name-match" data-reading="あれくしあ" data-headword="アレクシア" data-frequency-rank="12">アレクシア<\/span>/,
|
||||||
|
);
|
||||||
|
assert.doesNotMatch(markup, /word-name-match word-known|word-known word-name-match/);
|
||||||
|
});
|
||||||
|
|
||||||
test('serializeSubtitleWebsocketMessage emits sentence payload', () => {
|
test('serializeSubtitleWebsocketMessage emits sentence payload', () => {
|
||||||
const payload: SubtitleData = {
|
const payload: SubtitleData = {
|
||||||
text: '字幕',
|
text: '字幕',
|
||||||
@@ -85,5 +134,101 @@ test('serializeSubtitleWebsocketMessage emits sentence payload', () => {
|
|||||||
};
|
};
|
||||||
|
|
||||||
const raw = serializeSubtitleWebsocketMessage(payload, frequencyOptions);
|
const raw = serializeSubtitleWebsocketMessage(payload, frequencyOptions);
|
||||||
assert.deepEqual(JSON.parse(raw), { sentence: '字幕' });
|
assert.deepEqual(JSON.parse(raw), {
|
||||||
|
version: 1,
|
||||||
|
text: '字幕',
|
||||||
|
sentence: '字幕',
|
||||||
|
tokens: [],
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
test('serializeSubtitleWebsocketMessage emits structured token api payload', () => {
|
||||||
|
const payload: SubtitleData = {
|
||||||
|
text: '無事',
|
||||||
|
tokens: [
|
||||||
|
{
|
||||||
|
surface: '無事',
|
||||||
|
reading: 'ぶじ',
|
||||||
|
headword: '無事',
|
||||||
|
startPos: 0,
|
||||||
|
endPos: 2,
|
||||||
|
partOfSpeech: PartOfSpeech.other,
|
||||||
|
isMerged: false,
|
||||||
|
isKnown: true,
|
||||||
|
isNPlusOneTarget: false,
|
||||||
|
jlptLevel: 'N2',
|
||||||
|
frequencyRank: 745,
|
||||||
|
},
|
||||||
|
],
|
||||||
|
};
|
||||||
|
|
||||||
|
const raw = serializeSubtitleWebsocketMessage(payload, frequencyOptions);
|
||||||
|
assert.deepEqual(JSON.parse(raw), {
|
||||||
|
version: 1,
|
||||||
|
text: '無事',
|
||||||
|
sentence:
|
||||||
|
'<span class="word word-known word-jlpt-n2" data-reading="ぶじ" data-headword="無事" data-frequency-rank="745" data-jlpt-level="N2">無事</span>',
|
||||||
|
tokens: [
|
||||||
|
{
|
||||||
|
surface: '無事',
|
||||||
|
reading: 'ぶじ',
|
||||||
|
headword: '無事',
|
||||||
|
startPos: 0,
|
||||||
|
endPos: 2,
|
||||||
|
partOfSpeech: PartOfSpeech.other,
|
||||||
|
isMerged: false,
|
||||||
|
isKnown: true,
|
||||||
|
isNPlusOneTarget: false,
|
||||||
|
isNameMatch: false,
|
||||||
|
jlptLevel: 'N2',
|
||||||
|
frequencyRank: 745,
|
||||||
|
className: 'word word-known word-jlpt-n2',
|
||||||
|
frequencyRankLabel: '745',
|
||||||
|
jlptLevelLabel: 'N2',
|
||||||
|
},
|
||||||
|
],
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
test('serializeInitialSubtitleWebsocketMessage keeps annotated current subtitle content', () => {
|
||||||
|
const payload: SubtitleData = {
|
||||||
|
text: 'ignored fallback',
|
||||||
|
tokens: [
|
||||||
|
{
|
||||||
|
surface: '既知',
|
||||||
|
reading: '',
|
||||||
|
headword: '',
|
||||||
|
startPos: 0,
|
||||||
|
endPos: 2,
|
||||||
|
partOfSpeech: PartOfSpeech.other,
|
||||||
|
isMerged: false,
|
||||||
|
isKnown: true,
|
||||||
|
isNPlusOneTarget: false,
|
||||||
|
},
|
||||||
|
],
|
||||||
|
};
|
||||||
|
|
||||||
|
const raw = serializeInitialSubtitleWebsocketMessage(payload, frequencyOptions);
|
||||||
|
assert.deepEqual(JSON.parse(raw ?? ''), {
|
||||||
|
version: 1,
|
||||||
|
text: 'ignored fallback',
|
||||||
|
sentence: '<span class="word word-known">既知</span>',
|
||||||
|
tokens: [
|
||||||
|
{
|
||||||
|
surface: '既知',
|
||||||
|
reading: '',
|
||||||
|
headword: '',
|
||||||
|
startPos: 0,
|
||||||
|
endPos: 2,
|
||||||
|
partOfSpeech: PartOfSpeech.other,
|
||||||
|
isMerged: false,
|
||||||
|
isKnown: true,
|
||||||
|
isNPlusOneTarget: false,
|
||||||
|
isNameMatch: false,
|
||||||
|
className: 'word word-known',
|
||||||
|
frequencyRankLabel: null,
|
||||||
|
jlptLevelLabel: null,
|
||||||
|
},
|
||||||
|
],
|
||||||
|
});
|
||||||
});
|
});
|
||||||
|
|||||||
@@ -18,6 +18,26 @@ export type SubtitleWebsocketFrequencyOptions = {
|
|||||||
mode: 'single' | 'banded';
|
mode: 'single' | 'banded';
|
||||||
};
|
};
|
||||||
|
|
||||||
|
type SerializedSubtitleToken = Pick<
|
||||||
|
MergedToken,
|
||||||
|
| 'surface'
|
||||||
|
| 'reading'
|
||||||
|
| 'headword'
|
||||||
|
| 'startPos'
|
||||||
|
| 'endPos'
|
||||||
|
| 'partOfSpeech'
|
||||||
|
| 'isMerged'
|
||||||
|
| 'isKnown'
|
||||||
|
| 'isNPlusOneTarget'
|
||||||
|
| 'frequencyRank'
|
||||||
|
| 'jlptLevel'
|
||||||
|
> & {
|
||||||
|
isNameMatch: boolean;
|
||||||
|
className: string;
|
||||||
|
frequencyRankLabel: string | null;
|
||||||
|
jlptLevelLabel: string | null;
|
||||||
|
};
|
||||||
|
|
||||||
function escapeHtml(text: string): string {
|
function escapeHtml(text: string): string {
|
||||||
return text
|
return text
|
||||||
.replaceAll('&', '&')
|
.replaceAll('&', '&')
|
||||||
@@ -46,11 +66,29 @@ function computeFrequencyClass(
|
|||||||
return 'word-frequency-single';
|
return 'word-frequency-single';
|
||||||
}
|
}
|
||||||
|
|
||||||
|
function getFrequencyRankLabel(
|
||||||
|
token: MergedToken,
|
||||||
|
options: SubtitleWebsocketFrequencyOptions,
|
||||||
|
): string | null {
|
||||||
|
if (!options.enabled) return null;
|
||||||
|
if (typeof token.frequencyRank !== 'number' || !Number.isFinite(token.frequencyRank)) return null;
|
||||||
|
|
||||||
|
const rank = Math.max(1, Math.floor(token.frequencyRank));
|
||||||
|
const topX = Math.max(1, Math.floor(options.topX));
|
||||||
|
return rank <= topX ? String(rank) : null;
|
||||||
|
}
|
||||||
|
|
||||||
|
function getJlptLevelLabel(token: MergedToken): string | null {
|
||||||
|
return token.jlptLevel ?? null;
|
||||||
|
}
|
||||||
|
|
||||||
function computeWordClass(token: MergedToken, options: SubtitleWebsocketFrequencyOptions): string {
|
function computeWordClass(token: MergedToken, options: SubtitleWebsocketFrequencyOptions): string {
|
||||||
const classes = ['word'];
|
const classes = ['word'];
|
||||||
|
|
||||||
if (token.isNPlusOneTarget) {
|
if (token.isNPlusOneTarget) {
|
||||||
classes.push('word-n-plus-one');
|
classes.push('word-n-plus-one');
|
||||||
|
} else if (token.isNameMatch) {
|
||||||
|
classes.push('word-name-match');
|
||||||
} else if (token.isKnown) {
|
} else if (token.isKnown) {
|
||||||
classes.push('word-known');
|
classes.push('word-known');
|
||||||
}
|
}
|
||||||
@@ -59,7 +97,7 @@ function computeWordClass(token: MergedToken, options: SubtitleWebsocketFrequenc
|
|||||||
classes.push(`word-jlpt-${token.jlptLevel.toLowerCase()}`);
|
classes.push(`word-jlpt-${token.jlptLevel.toLowerCase()}`);
|
||||||
}
|
}
|
||||||
|
|
||||||
if (!token.isKnown && !token.isNPlusOneTarget) {
|
if (!token.isKnown && !token.isNPlusOneTarget && !token.isNameMatch) {
|
||||||
const frequencyClass = computeFrequencyClass(token, options);
|
const frequencyClass = computeFrequencyClass(token, options);
|
||||||
if (frequencyClass) {
|
if (frequencyClass) {
|
||||||
classes.push(frequencyClass);
|
classes.push(frequencyClass);
|
||||||
@@ -69,6 +107,55 @@ function computeWordClass(token: MergedToken, options: SubtitleWebsocketFrequenc
|
|||||||
return classes.join(' ');
|
return classes.join(' ');
|
||||||
}
|
}
|
||||||
|
|
||||||
|
function serializeWordDataAttributes(
|
||||||
|
token: MergedToken,
|
||||||
|
options: SubtitleWebsocketFrequencyOptions,
|
||||||
|
): string {
|
||||||
|
const attributes: string[] = [];
|
||||||
|
|
||||||
|
if (token.reading) {
|
||||||
|
attributes.push(`data-reading="${escapeHtml(token.reading)}"`);
|
||||||
|
}
|
||||||
|
if (token.headword) {
|
||||||
|
attributes.push(`data-headword="${escapeHtml(token.headword)}"`);
|
||||||
|
}
|
||||||
|
|
||||||
|
const frequencyRankLabel = getFrequencyRankLabel(token, options);
|
||||||
|
if (frequencyRankLabel) {
|
||||||
|
attributes.push(`data-frequency-rank="${escapeHtml(frequencyRankLabel)}"`);
|
||||||
|
}
|
||||||
|
|
||||||
|
const jlptLevelLabel = getJlptLevelLabel(token);
|
||||||
|
if (jlptLevelLabel) {
|
||||||
|
attributes.push(`data-jlpt-level="${escapeHtml(jlptLevelLabel)}"`);
|
||||||
|
}
|
||||||
|
|
||||||
|
return attributes.length > 0 ? ` ${attributes.join(' ')}` : '';
|
||||||
|
}
|
||||||
|
|
||||||
|
function serializeSubtitleToken(
|
||||||
|
token: MergedToken,
|
||||||
|
options: SubtitleWebsocketFrequencyOptions,
|
||||||
|
): SerializedSubtitleToken {
|
||||||
|
return {
|
||||||
|
surface: token.surface,
|
||||||
|
reading: token.reading,
|
||||||
|
headword: token.headword,
|
||||||
|
startPos: token.startPos,
|
||||||
|
endPos: token.endPos,
|
||||||
|
partOfSpeech: token.partOfSpeech,
|
||||||
|
isMerged: token.isMerged,
|
||||||
|
isKnown: token.isKnown,
|
||||||
|
isNPlusOneTarget: token.isNPlusOneTarget,
|
||||||
|
isNameMatch: token.isNameMatch ?? false,
|
||||||
|
jlptLevel: token.jlptLevel,
|
||||||
|
frequencyRank: token.frequencyRank,
|
||||||
|
className: computeWordClass(token, options),
|
||||||
|
frequencyRankLabel: getFrequencyRankLabel(token, options),
|
||||||
|
jlptLevelLabel: getJlptLevelLabel(token),
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
export function serializeSubtitleMarkup(
|
export function serializeSubtitleMarkup(
|
||||||
payload: SubtitleData,
|
payload: SubtitleData,
|
||||||
options: SubtitleWebsocketFrequencyOptions,
|
options: SubtitleWebsocketFrequencyOptions,
|
||||||
@@ -80,11 +167,12 @@ export function serializeSubtitleMarkup(
|
|||||||
const chunks: string[] = [];
|
const chunks: string[] = [];
|
||||||
for (const token of payload.tokens) {
|
for (const token of payload.tokens) {
|
||||||
const klass = computeWordClass(token, options);
|
const klass = computeWordClass(token, options);
|
||||||
|
const attrs = serializeWordDataAttributes(token, options);
|
||||||
const parts = token.surface.split('\n');
|
const parts = token.surface.split('\n');
|
||||||
for (let index = 0; index < parts.length; index += 1) {
|
for (let index = 0; index < parts.length; index += 1) {
|
||||||
const part = parts[index];
|
const part = parts[index];
|
||||||
if (part) {
|
if (part) {
|
||||||
chunks.push(`<span class="${klass}">${escapeHtml(part)}</span>`);
|
chunks.push(`<span class="${klass}"${attrs}>${escapeHtml(part)}</span>`);
|
||||||
}
|
}
|
||||||
if (index < parts.length - 1) {
|
if (index < parts.length - 1) {
|
||||||
chunks.push('<br>');
|
chunks.push('<br>');
|
||||||
@@ -99,7 +187,23 @@ export function serializeSubtitleWebsocketMessage(
|
|||||||
payload: SubtitleData,
|
payload: SubtitleData,
|
||||||
options: SubtitleWebsocketFrequencyOptions,
|
options: SubtitleWebsocketFrequencyOptions,
|
||||||
): string {
|
): string {
|
||||||
return JSON.stringify({ sentence: serializeSubtitleMarkup(payload, options) });
|
return JSON.stringify({
|
||||||
|
version: 1,
|
||||||
|
text: payload.text,
|
||||||
|
sentence: serializeSubtitleMarkup(payload, options),
|
||||||
|
tokens: payload.tokens?.map((token) => serializeSubtitleToken(token, options)) ?? [],
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
export function serializeInitialSubtitleWebsocketMessage(
|
||||||
|
payload: SubtitleData | null,
|
||||||
|
options: SubtitleWebsocketFrequencyOptions,
|
||||||
|
): string | null {
|
||||||
|
if (!payload || !payload.text.trim()) {
|
||||||
|
return null;
|
||||||
|
}
|
||||||
|
|
||||||
|
return serializeSubtitleWebsocketMessage(payload, options);
|
||||||
}
|
}
|
||||||
|
|
||||||
export class SubtitleWebSocket {
|
export class SubtitleWebSocket {
|
||||||
@@ -114,7 +218,11 @@ export class SubtitleWebSocket {
|
|||||||
return (this.server?.clients.size ?? 0) > 0;
|
return (this.server?.clients.size ?? 0) > 0;
|
||||||
}
|
}
|
||||||
|
|
||||||
public start(port: number, getCurrentSubtitleText: () => string): void {
|
public start(
|
||||||
|
port: number,
|
||||||
|
getCurrentSubtitleData: () => SubtitleData | null,
|
||||||
|
getFrequencyOptions: () => SubtitleWebsocketFrequencyOptions,
|
||||||
|
): void {
|
||||||
this.server = new WebSocket.Server({ port, host: '127.0.0.1' });
|
this.server = new WebSocket.Server({ port, host: '127.0.0.1' });
|
||||||
|
|
||||||
this.server.on('connection', (ws: WebSocket) => {
|
this.server.on('connection', (ws: WebSocket) => {
|
||||||
@@ -124,9 +232,12 @@ export class SubtitleWebSocket {
|
|||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
|
|
||||||
const currentText = getCurrentSubtitleText();
|
const currentMessage = serializeInitialSubtitleWebsocketMessage(
|
||||||
if (currentText) {
|
getCurrentSubtitleData(),
|
||||||
ws.send(JSON.stringify({ sentence: currentText }));
|
getFrequencyOptions(),
|
||||||
|
);
|
||||||
|
if (currentMessage) {
|
||||||
|
ws.send(currentMessage);
|
||||||
}
|
}
|
||||||
});
|
});
|
||||||
|
|
||||||
|
|||||||
27
src/core/services/texthooker.test.ts
Normal file
27
src/core/services/texthooker.test.ts
Normal file
@@ -0,0 +1,27 @@
|
|||||||
|
import assert from 'node:assert/strict';
|
||||||
|
import test from 'node:test';
|
||||||
|
import { injectTexthookerBootstrapHtml } from './texthooker';
|
||||||
|
|
||||||
|
test('injectTexthookerBootstrapHtml injects websocket bootstrap before head close', () => {
|
||||||
|
const html = '<html><head><title>Texthooker</title></head><body></body></html>';
|
||||||
|
|
||||||
|
const actual = injectTexthookerBootstrapHtml(html, 'ws://127.0.0.1:6678');
|
||||||
|
|
||||||
|
assert.match(
|
||||||
|
actual,
|
||||||
|
/window\.localStorage\.setItem\('bannou-texthooker-websocketUrl', "ws:\/\/127\.0\.0\.1:6678"\)/,
|
||||||
|
);
|
||||||
|
assert.ok(actual.indexOf('</script></head>') !== -1);
|
||||||
|
assert.ok(actual.includes('bannou-texthooker-websocketUrl'));
|
||||||
|
assert.ok(!actual.includes('bannou-texthooker-enableKnownWordColoring'));
|
||||||
|
assert.ok(!actual.includes('bannou-texthooker-enableNPlusOneColoring'));
|
||||||
|
assert.ok(!actual.includes('bannou-texthooker-enableNameMatchColoring'));
|
||||||
|
assert.ok(!actual.includes('bannou-texthooker-enableFrequencyColoring'));
|
||||||
|
assert.ok(!actual.includes('bannou-texthooker-enableJlptColoring'));
|
||||||
|
});
|
||||||
|
|
||||||
|
test('injectTexthookerBootstrapHtml leaves html unchanged without websocketUrl', () => {
|
||||||
|
const html = '<html><head></head><body></body></html>';
|
||||||
|
|
||||||
|
assert.equal(injectTexthookerBootstrapHtml(html), html);
|
||||||
|
});
|
||||||
@@ -5,6 +5,22 @@ import { createLogger } from '../../logger';
|
|||||||
|
|
||||||
const logger = createLogger('main:texthooker');
|
const logger = createLogger('main:texthooker');
|
||||||
|
|
||||||
|
export function injectTexthookerBootstrapHtml(html: string, websocketUrl?: string): string {
|
||||||
|
if (!websocketUrl) {
|
||||||
|
return html;
|
||||||
|
}
|
||||||
|
|
||||||
|
const bootstrapScript = `<script>window.localStorage.setItem('bannou-texthooker-websocketUrl', ${JSON.stringify(
|
||||||
|
websocketUrl,
|
||||||
|
)});</script>`;
|
||||||
|
|
||||||
|
if (html.includes('</head>')) {
|
||||||
|
return html.replace('</head>', `${bootstrapScript}</head>`);
|
||||||
|
}
|
||||||
|
|
||||||
|
return `${bootstrapScript}${html}`;
|
||||||
|
}
|
||||||
|
|
||||||
export class Texthooker {
|
export class Texthooker {
|
||||||
private server: http.Server | null = null;
|
private server: http.Server | null = null;
|
||||||
|
|
||||||
@@ -12,7 +28,11 @@ export class Texthooker {
|
|||||||
return this.server !== null;
|
return this.server !== null;
|
||||||
}
|
}
|
||||||
|
|
||||||
public start(port: number): http.Server | null {
|
public start(port: number, websocketUrl?: string): http.Server | null {
|
||||||
|
if (this.server) {
|
||||||
|
return this.server;
|
||||||
|
}
|
||||||
|
|
||||||
const texthookerPath = this.getTexthookerPath();
|
const texthookerPath = this.getTexthookerPath();
|
||||||
if (!texthookerPath) {
|
if (!texthookerPath) {
|
||||||
logger.error('texthooker-ui not found');
|
logger.error('texthooker-ui not found');
|
||||||
@@ -42,8 +62,12 @@ export class Texthooker {
|
|||||||
res.end('Not found');
|
res.end('Not found');
|
||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
|
const responseData =
|
||||||
|
urlPath === '/' || urlPath === '/index.html'
|
||||||
|
? Buffer.from(injectTexthookerBootstrapHtml(data.toString('utf-8'), websocketUrl))
|
||||||
|
: data;
|
||||||
res.writeHead(200, { 'Content-Type': mimeTypes[ext] || 'text/plain' });
|
res.writeHead(200, { 'Content-Type': mimeTypes[ext] || 'text/plain' });
|
||||||
res.end(data);
|
res.end(responseData);
|
||||||
});
|
});
|
||||||
});
|
});
|
||||||
|
|
||||||
|
|||||||
@@ -1861,9 +1861,9 @@ test('tokenizeSubtitle keeps parsing explicit by scanning-parser source only', a
|
|||||||
assert.equal(result.tokens?.[4]?.frequencyRank, 1500);
|
assert.equal(result.tokens?.[4]?.frequencyRank, 1500);
|
||||||
});
|
});
|
||||||
|
|
||||||
test('tokenizeSubtitle still assigns frequency to non-known Yomitan tokens', async () => {
|
test('tokenizeSubtitle still assigns frequency to non-known multi-character Yomitan tokens', async () => {
|
||||||
const result = await tokenizeSubtitle(
|
const result = await tokenizeSubtitle(
|
||||||
'小園に',
|
'小園友達',
|
||||||
makeDeps({
|
makeDeps({
|
||||||
getYomitanExt: () => ({ id: 'dummy-ext' }) as any,
|
getYomitanExt: () => ({ id: 'dummy-ext' }) as any,
|
||||||
getYomitanParserWindow: () =>
|
getYomitanParserWindow: () =>
|
||||||
@@ -1884,9 +1884,9 @@ test('tokenizeSubtitle still assigns frequency to non-known Yomitan tokens', asy
|
|||||||
],
|
],
|
||||||
[
|
[
|
||||||
{
|
{
|
||||||
text: 'に',
|
text: '友達',
|
||||||
reading: 'に',
|
reading: 'ともだち',
|
||||||
headwords: [[{ term: 'に' }]],
|
headwords: [[{ term: '友達' }]],
|
||||||
},
|
},
|
||||||
],
|
],
|
||||||
],
|
],
|
||||||
@@ -1895,7 +1895,7 @@ test('tokenizeSubtitle still assigns frequency to non-known Yomitan tokens', asy
|
|||||||
},
|
},
|
||||||
}) as unknown as Electron.BrowserWindow,
|
}) as unknown as Electron.BrowserWindow,
|
||||||
getFrequencyDictionaryEnabled: () => true,
|
getFrequencyDictionaryEnabled: () => true,
|
||||||
getFrequencyRank: (text) => (text === '小園' ? 75 : text === 'に' ? 3000 : null),
|
getFrequencyRank: (text) => (text === '小園' ? 75 : text === '友達' ? 3000 : null),
|
||||||
isKnownWord: (text) => text === '小園',
|
isKnownWord: (text) => text === '小園',
|
||||||
}),
|
}),
|
||||||
);
|
);
|
||||||
@@ -2635,6 +2635,21 @@ test('tokenizeSubtitle excludes default non-independent pos2 from N+1 and freque
|
|||||||
assert.equal(result.tokens?.[0]?.isNPlusOneTarget, false);
|
assert.equal(result.tokens?.[0]?.isNPlusOneTarget, false);
|
||||||
});
|
});
|
||||||
|
|
||||||
|
test('tokenizeSubtitle excludes single-kana merged tokens from frequency highlighting', async () => {
|
||||||
|
const result = await tokenizeSubtitle(
|
||||||
|
'た',
|
||||||
|
makeDepsFromYomitanTokens([{ surface: 'た', reading: 'た', headword: 'た' }], {
|
||||||
|
getFrequencyDictionaryEnabled: () => true,
|
||||||
|
getFrequencyRank: (text) => (text === 'た' ? 17 : null),
|
||||||
|
getMinSentenceWordsForNPlusOne: () => 1,
|
||||||
|
tokenizeWithMecab: async () => null,
|
||||||
|
}),
|
||||||
|
);
|
||||||
|
|
||||||
|
assert.equal(result.tokens?.length, 1);
|
||||||
|
assert.equal(result.tokens?.[0]?.frequencyRank, undefined);
|
||||||
|
});
|
||||||
|
|
||||||
test('tokenizeSubtitle excludes merged function/content token from frequency highlighting but keeps N+1', async () => {
|
test('tokenizeSubtitle excludes merged function/content token from frequency highlighting but keeps N+1', async () => {
|
||||||
const result = await tokenizeSubtitle(
|
const result = await tokenizeSubtitle(
|
||||||
'になれば',
|
'になれば',
|
||||||
|
|||||||
@@ -252,12 +252,12 @@ test('annotateTokens applies configured pos1 exclusions to both frequency and N+
|
|||||||
test('annotateTokens allows previously default-excluded pos1 when removed from effective set', () => {
|
test('annotateTokens allows previously default-excluded pos1 when removed from effective set', () => {
|
||||||
const tokens = [
|
const tokens = [
|
||||||
makeToken({
|
makeToken({
|
||||||
surface: 'は',
|
surface: 'まで',
|
||||||
headword: 'は',
|
headword: 'まで',
|
||||||
partOfSpeech: PartOfSpeech.other,
|
partOfSpeech: PartOfSpeech.other,
|
||||||
pos1: '助詞',
|
pos1: '助詞',
|
||||||
startPos: 0,
|
startPos: 0,
|
||||||
endPos: 1,
|
endPos: 2,
|
||||||
frequencyRank: 8,
|
frequencyRank: 8,
|
||||||
}),
|
}),
|
||||||
];
|
];
|
||||||
@@ -314,6 +314,52 @@ test('annotateTokens excludes likely kana SFX tokens from frequency when POS tag
|
|||||||
assert.equal(result[0]?.frequencyRank, undefined);
|
assert.equal(result[0]?.frequencyRank, undefined);
|
||||||
});
|
});
|
||||||
|
|
||||||
|
test('annotateTokens excludes single hiragana and katakana tokens from frequency when POS tags are missing', () => {
|
||||||
|
const tokens = [
|
||||||
|
makeToken({
|
||||||
|
surface: 'た',
|
||||||
|
reading: 'た',
|
||||||
|
headword: 'た',
|
||||||
|
pos1: '',
|
||||||
|
pos2: '',
|
||||||
|
partOfSpeech: PartOfSpeech.other,
|
||||||
|
frequencyRank: 21,
|
||||||
|
startPos: 0,
|
||||||
|
endPos: 1,
|
||||||
|
}),
|
||||||
|
makeToken({
|
||||||
|
surface: 'ア',
|
||||||
|
reading: 'ア',
|
||||||
|
headword: 'ア',
|
||||||
|
pos1: '',
|
||||||
|
pos2: '',
|
||||||
|
partOfSpeech: PartOfSpeech.other,
|
||||||
|
frequencyRank: 22,
|
||||||
|
startPos: 1,
|
||||||
|
endPos: 2,
|
||||||
|
}),
|
||||||
|
makeToken({
|
||||||
|
surface: '山',
|
||||||
|
reading: 'やま',
|
||||||
|
headword: '山',
|
||||||
|
pos1: '',
|
||||||
|
pos2: '',
|
||||||
|
partOfSpeech: PartOfSpeech.other,
|
||||||
|
frequencyRank: 23,
|
||||||
|
startPos: 2,
|
||||||
|
endPos: 3,
|
||||||
|
}),
|
||||||
|
];
|
||||||
|
|
||||||
|
const result = annotateTokens(tokens, makeDeps(), {
|
||||||
|
minSentenceWordsForNPlusOne: 1,
|
||||||
|
});
|
||||||
|
|
||||||
|
assert.equal(result[0]?.frequencyRank, undefined);
|
||||||
|
assert.equal(result[1]?.frequencyRank, undefined);
|
||||||
|
assert.equal(result[2]?.frequencyRank, 23);
|
||||||
|
});
|
||||||
|
|
||||||
test('annotateTokens keeps frequency when mecab tags classify token as content-bearing', () => {
|
test('annotateTokens keeps frequency when mecab tags classify token as content-bearing', () => {
|
||||||
const tokens = [
|
const tokens = [
|
||||||
makeToken({
|
makeToken({
|
||||||
|
|||||||
@@ -103,6 +103,10 @@ function isFrequencyExcludedByPos(
|
|||||||
pos1Exclusions: ReadonlySet<string>,
|
pos1Exclusions: ReadonlySet<string>,
|
||||||
pos2Exclusions: ReadonlySet<string>,
|
pos2Exclusions: ReadonlySet<string>,
|
||||||
): boolean {
|
): boolean {
|
||||||
|
if (isSingleKanaFrequencyNoiseToken(token.surface)) {
|
||||||
|
return true;
|
||||||
|
}
|
||||||
|
|
||||||
const normalizedPos1 = normalizePos1Tag(token.pos1);
|
const normalizedPos1 = normalizePos1Tag(token.pos1);
|
||||||
const hasPos1 = normalizedPos1.length > 0;
|
const hasPos1 = normalizedPos1.length > 0;
|
||||||
if (isExcludedByTagSet(normalizedPos1, pos1Exclusions)) {
|
if (isExcludedByTagSet(normalizedPos1, pos1Exclusions)) {
|
||||||
@@ -363,6 +367,20 @@ function isLikelyFrequencyNoiseToken(token: MergedToken): boolean {
|
|||||||
return false;
|
return false;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
function isSingleKanaFrequencyNoiseToken(text: string | undefined): boolean {
|
||||||
|
if (typeof text !== 'string') {
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
|
||||||
|
const normalized = text.trim();
|
||||||
|
if (!normalized) {
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
|
||||||
|
const chars = [...normalized];
|
||||||
|
return chars.length === 1 && isKanaChar(chars[0]!);
|
||||||
|
}
|
||||||
|
|
||||||
function isJlptEligibleToken(token: MergedToken): boolean {
|
function isJlptEligibleToken(token: MergedToken): boolean {
|
||||||
if (token.pos1 && shouldIgnoreJlptForMecabPos1(token.pos1)) {
|
if (token.pos1 && shouldIgnoreJlptForMecabPos1(token.pos1)) {
|
||||||
return false;
|
return false;
|
||||||
|
|||||||
@@ -643,6 +643,173 @@ test('requestYomitanScanTokens marks grouped entries when SubMiner dictionary al
|
|||||||
assert.equal((result as Array<{ isNameMatch?: boolean }>)[0]?.isNameMatch, true);
|
assert.equal((result as Array<{ isNameMatch?: boolean }>)[0]?.isNameMatch, true);
|
||||||
});
|
});
|
||||||
|
|
||||||
|
test('requestYomitanScanTokens skips fallback fragments without exact primary source matches', async () => {
|
||||||
|
const deps = createDeps(async (script) => {
|
||||||
|
if (script.includes('optionsGetFull')) {
|
||||||
|
return {
|
||||||
|
profileCurrent: 0,
|
||||||
|
profiles: [
|
||||||
|
{
|
||||||
|
options: {
|
||||||
|
scanning: { length: 40 },
|
||||||
|
},
|
||||||
|
},
|
||||||
|
],
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
return await runInjectedYomitanScript(script, (action, params) => {
|
||||||
|
if (action !== 'termsFind') {
|
||||||
|
throw new Error(`unexpected action: ${action}`);
|
||||||
|
}
|
||||||
|
|
||||||
|
const text = (params as { text?: string } | undefined)?.text ?? '';
|
||||||
|
if (text.startsWith('だが ')) {
|
||||||
|
return {
|
||||||
|
originalTextLength: 2,
|
||||||
|
dictionaryEntries: [
|
||||||
|
{
|
||||||
|
headwords: [
|
||||||
|
{
|
||||||
|
term: 'だが',
|
||||||
|
reading: 'だが',
|
||||||
|
sources: [{ originalText: 'だが', isPrimary: true, matchType: 'exact' }],
|
||||||
|
},
|
||||||
|
],
|
||||||
|
},
|
||||||
|
],
|
||||||
|
};
|
||||||
|
}
|
||||||
|
if (text.startsWith('それでも')) {
|
||||||
|
return {
|
||||||
|
originalTextLength: 4,
|
||||||
|
dictionaryEntries: [
|
||||||
|
{
|
||||||
|
headwords: [
|
||||||
|
{
|
||||||
|
term: 'それでも',
|
||||||
|
reading: 'それでも',
|
||||||
|
sources: [{ originalText: 'それでも', isPrimary: true, matchType: 'exact' }],
|
||||||
|
},
|
||||||
|
],
|
||||||
|
},
|
||||||
|
],
|
||||||
|
};
|
||||||
|
}
|
||||||
|
if (text.startsWith('届かぬ')) {
|
||||||
|
return {
|
||||||
|
originalTextLength: 3,
|
||||||
|
dictionaryEntries: [
|
||||||
|
{
|
||||||
|
headwords: [
|
||||||
|
{
|
||||||
|
term: '届く',
|
||||||
|
reading: 'とどく',
|
||||||
|
sources: [{ originalText: '届かぬ', isPrimary: true, matchType: 'exact' }],
|
||||||
|
},
|
||||||
|
],
|
||||||
|
},
|
||||||
|
],
|
||||||
|
};
|
||||||
|
}
|
||||||
|
if (text.startsWith('高み')) {
|
||||||
|
return {
|
||||||
|
originalTextLength: 2,
|
||||||
|
dictionaryEntries: [
|
||||||
|
{
|
||||||
|
headwords: [
|
||||||
|
{
|
||||||
|
term: '高み',
|
||||||
|
reading: 'たかみ',
|
||||||
|
sources: [{ originalText: '高み', isPrimary: true, matchType: 'exact' }],
|
||||||
|
},
|
||||||
|
],
|
||||||
|
},
|
||||||
|
],
|
||||||
|
};
|
||||||
|
}
|
||||||
|
if (text.startsWith('があった')) {
|
||||||
|
return {
|
||||||
|
originalTextLength: 2,
|
||||||
|
dictionaryEntries: [
|
||||||
|
{
|
||||||
|
headwords: [
|
||||||
|
{
|
||||||
|
term: 'があ',
|
||||||
|
reading: '',
|
||||||
|
sources: [{ originalText: 'が', isPrimary: true, matchType: 'exact' }],
|
||||||
|
},
|
||||||
|
],
|
||||||
|
},
|
||||||
|
],
|
||||||
|
};
|
||||||
|
}
|
||||||
|
if (text.startsWith('あった')) {
|
||||||
|
return {
|
||||||
|
originalTextLength: 3,
|
||||||
|
dictionaryEntries: [
|
||||||
|
{
|
||||||
|
headwords: [
|
||||||
|
{
|
||||||
|
term: 'ある',
|
||||||
|
reading: 'ある',
|
||||||
|
sources: [{ originalText: 'あった', isPrimary: true, matchType: 'exact' }],
|
||||||
|
},
|
||||||
|
],
|
||||||
|
},
|
||||||
|
],
|
||||||
|
};
|
||||||
|
}
|
||||||
|
return { originalTextLength: 0, dictionaryEntries: [] };
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
const result = await requestYomitanScanTokens('だが それでも届かぬ高みがあった', deps, {
|
||||||
|
error: () => undefined,
|
||||||
|
});
|
||||||
|
|
||||||
|
assert.deepEqual(
|
||||||
|
result?.map((token) => ({
|
||||||
|
surface: token.surface,
|
||||||
|
headword: token.headword,
|
||||||
|
startPos: token.startPos,
|
||||||
|
endPos: token.endPos,
|
||||||
|
})),
|
||||||
|
[
|
||||||
|
{
|
||||||
|
surface: 'だが',
|
||||||
|
headword: 'だが',
|
||||||
|
startPos: 0,
|
||||||
|
endPos: 2,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
surface: 'それでも',
|
||||||
|
headword: 'それでも',
|
||||||
|
startPos: 3,
|
||||||
|
endPos: 7,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
surface: '届かぬ',
|
||||||
|
headword: '届く',
|
||||||
|
startPos: 7,
|
||||||
|
endPos: 10,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
surface: '高み',
|
||||||
|
headword: '高み',
|
||||||
|
startPos: 10,
|
||||||
|
endPos: 12,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
surface: 'あった',
|
||||||
|
headword: 'ある',
|
||||||
|
startPos: 13,
|
||||||
|
endPos: 16,
|
||||||
|
},
|
||||||
|
],
|
||||||
|
);
|
||||||
|
});
|
||||||
|
|
||||||
test('getYomitanDictionaryInfo requests dictionary info via backend action', async () => {
|
test('getYomitanDictionaryInfo requests dictionary info via backend action', async () => {
|
||||||
let scriptValue = '';
|
let scriptValue = '';
|
||||||
const deps = createDeps(async (script) => {
|
const deps = createDeps(async (script) => {
|
||||||
@@ -656,7 +823,7 @@ test('getYomitanDictionaryInfo requests dictionary info via backend action', asy
|
|||||||
assert.match(scriptValue, /getDictionaryInfo/);
|
assert.match(scriptValue, /getDictionaryInfo/);
|
||||||
});
|
});
|
||||||
|
|
||||||
test('dictionary settings helpers upsert and remove dictionary entries', async () => {
|
test('dictionary settings helpers upsert and remove dictionary entries without reordering', async () => {
|
||||||
const scripts: string[] = [];
|
const scripts: string[] = [];
|
||||||
const optionsFull = {
|
const optionsFull = {
|
||||||
profileCurrent: 0,
|
profileCurrent: 0,
|
||||||
@@ -706,7 +873,8 @@ test('dictionary settings helpers upsert and remove dictionary entries', async (
|
|||||||
|
|
||||||
const upsertScript = scripts.find(
|
const upsertScript = scripts.find(
|
||||||
(script) =>
|
(script) =>
|
||||||
script.includes('setAllSettings') && script.includes('"SubMiner Character Dictionary (AniList 1)"'),
|
script.includes('setAllSettings') &&
|
||||||
|
script.includes('"SubMiner Character Dictionary (AniList 1)"'),
|
||||||
);
|
);
|
||||||
assert.ok(upsertScript);
|
assert.ok(upsertScript);
|
||||||
const jitendexOffset = upsertScript?.indexOf('"Jitendex"') ?? -1;
|
const jitendexOffset = upsertScript?.indexOf('"Jitendex"') ?? -1;
|
||||||
@@ -746,9 +914,18 @@ test('importYomitanDictionaryFromZip uses settings automation bridge instead of
|
|||||||
});
|
});
|
||||||
|
|
||||||
assert.equal(imported, true);
|
assert.equal(imported, true);
|
||||||
assert.equal(scripts.some((script) => script.includes('__subminerYomitanSettingsAutomation')), true);
|
assert.equal(
|
||||||
assert.equal(scripts.some((script) => script.includes('importDictionaryArchiveBase64')), true);
|
scripts.some((script) => script.includes('__subminerYomitanSettingsAutomation')),
|
||||||
assert.equal(scripts.some((script) => script.includes('subminerImportDictionary')), false);
|
true,
|
||||||
|
);
|
||||||
|
assert.equal(
|
||||||
|
scripts.some((script) => script.includes('importDictionaryArchiveBase64')),
|
||||||
|
true,
|
||||||
|
);
|
||||||
|
assert.equal(
|
||||||
|
scripts.some((script) => script.includes('subminerImportDictionary')),
|
||||||
|
false,
|
||||||
|
);
|
||||||
});
|
});
|
||||||
|
|
||||||
test('deleteYomitanDictionaryByTitle uses settings automation bridge instead of custom backend action', async () => {
|
test('deleteYomitanDictionaryByTitle uses settings automation bridge instead of custom backend action', async () => {
|
||||||
@@ -778,7 +955,16 @@ test('deleteYomitanDictionaryByTitle uses settings automation bridge instead of
|
|||||||
);
|
);
|
||||||
|
|
||||||
assert.equal(deleted, true);
|
assert.equal(deleted, true);
|
||||||
assert.equal(scripts.some((script) => script.includes('__subminerYomitanSettingsAutomation')), true);
|
assert.equal(
|
||||||
assert.equal(scripts.some((script) => script.includes('deleteDictionary')), true);
|
scripts.some((script) => script.includes('__subminerYomitanSettingsAutomation')),
|
||||||
assert.equal(scripts.some((script) => script.includes('subminerDeleteDictionary')), false);
|
true,
|
||||||
|
);
|
||||||
|
assert.equal(
|
||||||
|
scripts.some((script) => script.includes('deleteDictionary')),
|
||||||
|
true,
|
||||||
|
);
|
||||||
|
assert.equal(
|
||||||
|
scripts.some((script) => script.includes('subminerDeleteDictionary')),
|
||||||
|
false,
|
||||||
|
);
|
||||||
});
|
});
|
||||||
|
|||||||
@@ -562,9 +562,7 @@ async function createYomitanExtensionWindow(
|
|||||||
});
|
});
|
||||||
return window;
|
return window;
|
||||||
} catch (err) {
|
} catch (err) {
|
||||||
logger.error(
|
logger.error(`Failed to create hidden Yomitan ${pageName} window: ${(err as Error).message}`);
|
||||||
`Failed to create hidden Yomitan ${pageName} window: ${(err as Error).message}`,
|
|
||||||
);
|
|
||||||
if (!window.isDestroyed()) {
|
if (!window.isDestroyed()) {
|
||||||
window.destroy();
|
window.destroy();
|
||||||
}
|
}
|
||||||
@@ -843,14 +841,7 @@ const YOMITAN_SCANNING_HELPERS = String.raw`
|
|||||||
};
|
};
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
const fallback = dictionaryEntries?.[0]?.headwords?.[0];
|
return null;
|
||||||
return fallback
|
|
||||||
? {
|
|
||||||
term: fallback.term,
|
|
||||||
reading: fallback.reading,
|
|
||||||
isNameMatch: matchedNameDictionary || isNameDictionaryEntry(dictionaryEntries?.[0])
|
|
||||||
}
|
|
||||||
: null;
|
|
||||||
}
|
}
|
||||||
`;
|
`;
|
||||||
|
|
||||||
@@ -1050,13 +1041,15 @@ export async function requestYomitanScanTokens(
|
|||||||
}
|
}
|
||||||
if (Array.isArray(rawResult)) {
|
if (Array.isArray(rawResult)) {
|
||||||
const selectedTokens = selectYomitanParseTokens(rawResult, () => false, 'headword');
|
const selectedTokens = selectYomitanParseTokens(rawResult, () => false, 'headword');
|
||||||
return selectedTokens?.map((token) => ({
|
return (
|
||||||
|
selectedTokens?.map((token) => ({
|
||||||
surface: token.surface,
|
surface: token.surface,
|
||||||
reading: token.reading,
|
reading: token.reading,
|
||||||
headword: token.headword,
|
headword: token.headword,
|
||||||
startPos: token.startPos,
|
startPos: token.startPos,
|
||||||
endPos: token.endPos,
|
endPos: token.endPos,
|
||||||
})) ?? null;
|
})) ?? null
|
||||||
|
);
|
||||||
}
|
}
|
||||||
return null;
|
return null;
|
||||||
} catch (err) {
|
} catch (err) {
|
||||||
@@ -1530,7 +1523,12 @@ export async function getYomitanDictionaryInfo(
|
|||||||
deps: YomitanParserRuntimeDeps,
|
deps: YomitanParserRuntimeDeps,
|
||||||
logger: LoggerLike,
|
logger: LoggerLike,
|
||||||
): Promise<YomitanDictionaryInfo[]> {
|
): Promise<YomitanDictionaryInfo[]> {
|
||||||
const result = await invokeYomitanBackendAction<unknown>('getDictionaryInfo', undefined, deps, logger);
|
const result = await invokeYomitanBackendAction<unknown>(
|
||||||
|
'getDictionaryInfo',
|
||||||
|
undefined,
|
||||||
|
deps,
|
||||||
|
logger,
|
||||||
|
);
|
||||||
if (!Array.isArray(result)) {
|
if (!Array.isArray(result)) {
|
||||||
return [];
|
return [];
|
||||||
}
|
}
|
||||||
@@ -1553,7 +1551,12 @@ export async function getYomitanSettingsFull(
|
|||||||
deps: YomitanParserRuntimeDeps,
|
deps: YomitanParserRuntimeDeps,
|
||||||
logger: LoggerLike,
|
logger: LoggerLike,
|
||||||
): Promise<Record<string, unknown> | null> {
|
): Promise<Record<string, unknown> | null> {
|
||||||
const result = await invokeYomitanBackendAction<unknown>('optionsGetFull', undefined, deps, logger);
|
const result = await invokeYomitanBackendAction<unknown>(
|
||||||
|
'optionsGetFull',
|
||||||
|
undefined,
|
||||||
|
deps,
|
||||||
|
logger,
|
||||||
|
);
|
||||||
return isObject(result) ? result : null;
|
return isObject(result) ? result : null;
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -1660,7 +1663,7 @@ export async function upsertYomitanDictionarySettings(
|
|||||||
(entry) =>
|
(entry) =>
|
||||||
isObject(entry) &&
|
isObject(entry) &&
|
||||||
typeof (entry as { name?: unknown }).name === 'string' &&
|
typeof (entry as { name?: unknown }).name === 'string' &&
|
||||||
((entry as { name: string }).name.trim() === normalizedTitle),
|
(entry as { name: string }).name.trim() === normalizedTitle,
|
||||||
);
|
);
|
||||||
|
|
||||||
if (existingIndex >= 0) {
|
if (existingIndex >= 0) {
|
||||||
@@ -1676,7 +1679,7 @@ export async function upsertYomitanDictionarySettings(
|
|||||||
continue;
|
continue;
|
||||||
}
|
}
|
||||||
|
|
||||||
dictionaries.unshift(createDefaultDictionarySettings(normalizedTitle, true));
|
dictionaries.push(createDefaultDictionarySettings(normalizedTitle, true));
|
||||||
changed = true;
|
changed = true;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|||||||
@@ -90,7 +90,10 @@ export function shouldCopyYomitanExtension(sourceDir: string, targetDir: string)
|
|||||||
return sourceHash === null || targetHash === null || sourceHash !== targetHash;
|
return sourceHash === null || targetHash === null || sourceHash !== targetHash;
|
||||||
}
|
}
|
||||||
|
|
||||||
export function ensureExtensionCopy(sourceDir: string, userDataPath: string): {
|
export function ensureExtensionCopy(
|
||||||
|
sourceDir: string,
|
||||||
|
userDataPath: string,
|
||||||
|
): {
|
||||||
targetDir: string;
|
targetDir: string;
|
||||||
copied: boolean;
|
copied: boolean;
|
||||||
} {
|
} {
|
||||||
|
|||||||
@@ -75,7 +75,10 @@ test('ensureExtensionCopy refreshes copied extension when display files change',
|
|||||||
assert.equal(result.targetDir, targetDir);
|
assert.equal(result.targetDir, targetDir);
|
||||||
assert.equal(result.copied, true);
|
assert.equal(result.copied, true);
|
||||||
assert.equal(
|
assert.equal(
|
||||||
fs.readFileSync(path.join(targetDir, 'js', 'display', 'structured-content-generator.js'), 'utf8'),
|
fs.readFileSync(
|
||||||
|
path.join(targetDir, 'js', 'display', 'structured-content-generator.js'),
|
||||||
|
'utf8',
|
||||||
|
),
|
||||||
'new display code',
|
'new display code',
|
||||||
);
|
);
|
||||||
});
|
});
|
||||||
|
|||||||
@@ -1,13 +1,17 @@
|
|||||||
import { BrowserWindow, Extension, session } from 'electron';
|
import { BrowserWindow, Extension, session } from 'electron';
|
||||||
import * as fs from 'fs';
|
import * as fs from 'fs';
|
||||||
import * as path from 'path';
|
|
||||||
import { createLogger } from '../../logger';
|
import { createLogger } from '../../logger';
|
||||||
import { ensureExtensionCopy } from './yomitan-extension-copy';
|
import { ensureExtensionCopy } from './yomitan-extension-copy';
|
||||||
|
import {
|
||||||
|
getYomitanExtensionSearchPaths,
|
||||||
|
resolveExistingYomitanExtensionPath,
|
||||||
|
} from './yomitan-extension-paths';
|
||||||
|
|
||||||
const logger = createLogger('main:yomitan-extension-loader');
|
const logger = createLogger('main:yomitan-extension-loader');
|
||||||
|
|
||||||
export interface YomitanExtensionLoaderDeps {
|
export interface YomitanExtensionLoaderDeps {
|
||||||
userDataPath: string;
|
userDataPath: string;
|
||||||
|
extensionPath?: string;
|
||||||
getYomitanParserWindow: () => BrowserWindow | null;
|
getYomitanParserWindow: () => BrowserWindow | null;
|
||||||
setYomitanParserWindow: (window: BrowserWindow | null) => void;
|
setYomitanParserWindow: (window: BrowserWindow | null) => void;
|
||||||
setYomitanParserReadyPromise: (promise: Promise<void> | null) => void;
|
setYomitanParserReadyPromise: (promise: Promise<void> | null) => void;
|
||||||
@@ -18,25 +22,17 @@ export interface YomitanExtensionLoaderDeps {
|
|||||||
export async function loadYomitanExtension(
|
export async function loadYomitanExtension(
|
||||||
deps: YomitanExtensionLoaderDeps,
|
deps: YomitanExtensionLoaderDeps,
|
||||||
): Promise<Extension | null> {
|
): Promise<Extension | null> {
|
||||||
const searchPaths = [
|
const searchPaths = getYomitanExtensionSearchPaths({
|
||||||
path.join(__dirname, '..', '..', 'vendor', 'yomitan'),
|
explicitPath: deps.extensionPath,
|
||||||
path.join(__dirname, '..', '..', '..', 'vendor', 'yomitan'),
|
moduleDir: __dirname,
|
||||||
path.join(process.resourcesPath, 'yomitan'),
|
resourcesPath: process.resourcesPath,
|
||||||
'/usr/share/SubMiner/yomitan',
|
userDataPath: deps.userDataPath,
|
||||||
path.join(deps.userDataPath, 'yomitan'),
|
});
|
||||||
];
|
let extPath = resolveExistingYomitanExtensionPath(searchPaths, fs.existsSync);
|
||||||
|
|
||||||
let extPath: string | null = null;
|
|
||||||
for (const p of searchPaths) {
|
|
||||||
if (fs.existsSync(p)) {
|
|
||||||
extPath = p;
|
|
||||||
break;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
if (!extPath) {
|
if (!extPath) {
|
||||||
logger.error('Yomitan extension not found in any search path');
|
logger.error('Yomitan extension not found in any search path');
|
||||||
logger.error('Install Yomitan to one of:', searchPaths);
|
logger.error('Run `bun run build:yomitan` or install Yomitan to one of:', searchPaths);
|
||||||
return null;
|
return null;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|||||||
50
src/core/services/yomitan-extension-paths.test.ts
Normal file
50
src/core/services/yomitan-extension-paths.test.ts
Normal file
@@ -0,0 +1,50 @@
|
|||||||
|
import assert from 'node:assert/strict';
|
||||||
|
import path from 'node:path';
|
||||||
|
import test from 'node:test';
|
||||||
|
|
||||||
|
import {
|
||||||
|
getYomitanExtensionSearchPaths,
|
||||||
|
resolveExistingYomitanExtensionPath,
|
||||||
|
} from './yomitan-extension-paths';
|
||||||
|
|
||||||
|
test('getYomitanExtensionSearchPaths prioritizes generated build output before packaged fallbacks', () => {
|
||||||
|
const searchPaths = getYomitanExtensionSearchPaths({
|
||||||
|
cwd: '/repo',
|
||||||
|
moduleDir: '/repo/dist/core/services',
|
||||||
|
resourcesPath: '/opt/SubMiner/resources',
|
||||||
|
userDataPath: '/Users/kyle/.config/SubMiner',
|
||||||
|
});
|
||||||
|
|
||||||
|
assert.deepEqual(searchPaths, [
|
||||||
|
path.join('/repo', 'build', 'yomitan'),
|
||||||
|
path.join('/opt/SubMiner/resources', 'yomitan'),
|
||||||
|
'/usr/share/SubMiner/yomitan',
|
||||||
|
path.join('/Users/kyle/.config/SubMiner', 'yomitan'),
|
||||||
|
]);
|
||||||
|
});
|
||||||
|
|
||||||
|
test('resolveExistingYomitanExtensionPath returns first manifest-backed candidate', () => {
|
||||||
|
const existing = new Set<string>([
|
||||||
|
path.join('/repo', 'build', 'yomitan', 'manifest.json'),
|
||||||
|
path.join('/repo', 'vendor', 'subminer-yomitan', 'ext', 'manifest.json'),
|
||||||
|
]);
|
||||||
|
|
||||||
|
const resolved = resolveExistingYomitanExtensionPath(
|
||||||
|
[
|
||||||
|
path.join('/repo', 'build', 'yomitan'),
|
||||||
|
path.join('/repo', 'vendor', 'subminer-yomitan', 'ext'),
|
||||||
|
],
|
||||||
|
(candidate) => existing.has(candidate),
|
||||||
|
);
|
||||||
|
|
||||||
|
assert.equal(resolved, path.join('/repo', 'build', 'yomitan'));
|
||||||
|
});
|
||||||
|
|
||||||
|
test('resolveExistingYomitanExtensionPath ignores source tree without built manifest', () => {
|
||||||
|
const resolved = resolveExistingYomitanExtensionPath(
|
||||||
|
[path.join('/repo', 'vendor', 'subminer-yomitan', 'ext')],
|
||||||
|
() => false,
|
||||||
|
);
|
||||||
|
|
||||||
|
assert.equal(resolved, null);
|
||||||
|
});
|
||||||
60
src/core/services/yomitan-extension-paths.ts
Normal file
60
src/core/services/yomitan-extension-paths.ts
Normal file
@@ -0,0 +1,60 @@
|
|||||||
|
import * as fs from 'node:fs';
|
||||||
|
import * as path from 'node:path';
|
||||||
|
|
||||||
|
export interface YomitanExtensionPathOptions {
|
||||||
|
explicitPath?: string;
|
||||||
|
cwd?: string;
|
||||||
|
moduleDir?: string;
|
||||||
|
resourcesPath?: string;
|
||||||
|
userDataPath?: string;
|
||||||
|
}
|
||||||
|
|
||||||
|
function pushUnique(values: string[], candidate: string | null | undefined): void {
|
||||||
|
if (!candidate || values.includes(candidate)) {
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
values.push(candidate);
|
||||||
|
}
|
||||||
|
|
||||||
|
export function getYomitanExtensionSearchPaths(
|
||||||
|
options: YomitanExtensionPathOptions = {},
|
||||||
|
): string[] {
|
||||||
|
const searchPaths: string[] = [];
|
||||||
|
|
||||||
|
pushUnique(searchPaths, options.explicitPath ? path.resolve(options.explicitPath) : null);
|
||||||
|
pushUnique(searchPaths, options.cwd ? path.resolve(options.cwd, 'build', 'yomitan') : null);
|
||||||
|
pushUnique(
|
||||||
|
searchPaths,
|
||||||
|
options.moduleDir
|
||||||
|
? path.resolve(options.moduleDir, '..', '..', '..', 'build', 'yomitan')
|
||||||
|
: null,
|
||||||
|
);
|
||||||
|
pushUnique(
|
||||||
|
searchPaths,
|
||||||
|
options.resourcesPath ? path.join(options.resourcesPath, 'yomitan') : null,
|
||||||
|
);
|
||||||
|
pushUnique(searchPaths, '/usr/share/SubMiner/yomitan');
|
||||||
|
pushUnique(searchPaths, options.userDataPath ? path.join(options.userDataPath, 'yomitan') : null);
|
||||||
|
|
||||||
|
return searchPaths;
|
||||||
|
}
|
||||||
|
|
||||||
|
export function resolveExistingYomitanExtensionPath(
|
||||||
|
searchPaths: string[],
|
||||||
|
existsSync: (path: string) => boolean = fs.existsSync,
|
||||||
|
): string | null {
|
||||||
|
for (const candidate of searchPaths) {
|
||||||
|
if (existsSync(path.join(candidate, 'manifest.json'))) {
|
||||||
|
return candidate;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return null;
|
||||||
|
}
|
||||||
|
|
||||||
|
export function resolveYomitanExtensionPath(
|
||||||
|
options: YomitanExtensionPathOptions = {},
|
||||||
|
existsSync: (path: string) => boolean = fs.existsSync,
|
||||||
|
): string | null {
|
||||||
|
return resolveExistingYomitanExtensionPath(getYomitanExtensionSearchPaths(options), existsSync);
|
||||||
|
}
|
||||||
@@ -2,6 +2,7 @@ import assert from 'node:assert/strict';
|
|||||||
import path from 'node:path';
|
import path from 'node:path';
|
||||||
import test from 'node:test';
|
import test from 'node:test';
|
||||||
import { pathToFileURL } from 'node:url';
|
import { pathToFileURL } from 'node:url';
|
||||||
|
import { resolveYomitanExtensionPath } from './yomitan-extension-paths';
|
||||||
|
|
||||||
class FakeStyle {
|
class FakeStyle {
|
||||||
private values = new Map<string, string>();
|
private values = new Map<string, string>();
|
||||||
@@ -155,15 +156,14 @@ function findFirstByClass(node: FakeNode, className: string): FakeNode | null {
|
|||||||
}
|
}
|
||||||
|
|
||||||
test('StructuredContentGenerator uses direct img loading for popup glossary images', async () => {
|
test('StructuredContentGenerator uses direct img loading for popup glossary images', async () => {
|
||||||
|
const yomitanRoot = resolveYomitanExtensionPath({ cwd: process.cwd() });
|
||||||
|
assert.ok(yomitanRoot, 'Run `bun run build:yomitan` before Yomitan integration tests.');
|
||||||
|
|
||||||
const { DisplayContentManager } = await import(
|
const { DisplayContentManager } = await import(
|
||||||
pathToFileURL(
|
pathToFileURL(path.join(yomitanRoot, 'js', 'display', 'display-content-manager.js')).href
|
||||||
path.join(process.cwd(), 'vendor/yomitan/js/display/display-content-manager.js'),
|
|
||||||
).href
|
|
||||||
);
|
);
|
||||||
const { StructuredContentGenerator } = await import(
|
const { StructuredContentGenerator } = await import(
|
||||||
pathToFileURL(
|
pathToFileURL(path.join(yomitanRoot, 'js', 'display', 'structured-content-generator.js')).href
|
||||||
path.join(process.cwd(), 'vendor/yomitan/js/display/structured-content-generator.js'),
|
|
||||||
).href
|
|
||||||
);
|
);
|
||||||
|
|
||||||
const createObjectURLCalls: string[] = [];
|
const createObjectURLCalls: string[] = [];
|
||||||
@@ -197,14 +197,10 @@ test('StructuredContentGenerator uses direct img loading for popup glossary imag
|
|||||||
},
|
},
|
||||||
});
|
});
|
||||||
|
|
||||||
const generator = new StructuredContentGenerator(
|
const generator = new StructuredContentGenerator(manager, new FakeDocument(), {
|
||||||
manager,
|
|
||||||
new FakeDocument(),
|
|
||||||
{
|
|
||||||
devicePixelRatio: 1,
|
devicePixelRatio: 1,
|
||||||
navigator: { userAgent: 'Mozilla/5.0' },
|
navigator: { userAgent: 'Mozilla/5.0' },
|
||||||
},
|
});
|
||||||
);
|
|
||||||
|
|
||||||
const node = generator.createDefinitionImage(
|
const node = generator.createDefinitionImage(
|
||||||
{
|
{
|
||||||
|
|||||||
82
src/generate-config-example.test.ts
Normal file
82
src/generate-config-example.test.ts
Normal file
@@ -0,0 +1,82 @@
|
|||||||
|
import assert from 'node:assert/strict';
|
||||||
|
import fs from 'node:fs';
|
||||||
|
import path from 'node:path';
|
||||||
|
import test from 'node:test';
|
||||||
|
import {
|
||||||
|
resolveConfigExampleOutputPaths,
|
||||||
|
writeConfigExampleArtifacts,
|
||||||
|
} from './generate-config-example';
|
||||||
|
|
||||||
|
function createWorkspace(name: string): string {
|
||||||
|
const baseDir = path.join(process.cwd(), '.tmp', 'generate-config-example-test');
|
||||||
|
fs.mkdirSync(baseDir, { recursive: true });
|
||||||
|
return fs.mkdtempSync(path.join(baseDir, `${name}-`));
|
||||||
|
}
|
||||||
|
|
||||||
|
test('resolveConfigExampleOutputPaths includes sibling docs repo and never local docs/public', () => {
|
||||||
|
const workspace = createWorkspace('with-docs-repo');
|
||||||
|
const projectRoot = path.join(workspace, 'SubMiner');
|
||||||
|
const docsRepoRoot = path.join(workspace, 'subminer-docs');
|
||||||
|
|
||||||
|
fs.mkdirSync(projectRoot, { recursive: true });
|
||||||
|
fs.mkdirSync(docsRepoRoot, { recursive: true });
|
||||||
|
|
||||||
|
try {
|
||||||
|
const outputPaths = resolveConfigExampleOutputPaths({ cwd: projectRoot });
|
||||||
|
|
||||||
|
assert.deepEqual(outputPaths, [
|
||||||
|
path.join(projectRoot, 'config.example.jsonc'),
|
||||||
|
path.join(docsRepoRoot, 'public', 'config.example.jsonc'),
|
||||||
|
]);
|
||||||
|
assert.equal(
|
||||||
|
outputPaths.includes(path.join(projectRoot, 'docs', 'public', 'config.example.jsonc')),
|
||||||
|
false,
|
||||||
|
);
|
||||||
|
} finally {
|
||||||
|
fs.rmSync(workspace, { recursive: true, force: true });
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
|
test('resolveConfigExampleOutputPaths stays repo-local when sibling docs repo is absent', () => {
|
||||||
|
const workspace = createWorkspace('without-docs-repo');
|
||||||
|
const projectRoot = path.join(workspace, 'SubMiner');
|
||||||
|
|
||||||
|
fs.mkdirSync(projectRoot, { recursive: true });
|
||||||
|
|
||||||
|
try {
|
||||||
|
const outputPaths = resolveConfigExampleOutputPaths({ cwd: projectRoot });
|
||||||
|
|
||||||
|
assert.deepEqual(outputPaths, [path.join(projectRoot, 'config.example.jsonc')]);
|
||||||
|
} finally {
|
||||||
|
fs.rmSync(workspace, { recursive: true, force: true });
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
|
test('writeConfigExampleArtifacts creates parent directories for resolved outputs', () => {
|
||||||
|
const workspace = createWorkspace('write-artifacts');
|
||||||
|
const projectRoot = path.join(workspace, 'SubMiner');
|
||||||
|
const docsRepoRoot = path.join(workspace, 'subminer-docs');
|
||||||
|
const template = '{\n "ok": true\n}\n';
|
||||||
|
|
||||||
|
fs.mkdirSync(projectRoot, { recursive: true });
|
||||||
|
fs.mkdirSync(docsRepoRoot, { recursive: true });
|
||||||
|
|
||||||
|
try {
|
||||||
|
const writtenPaths = writeConfigExampleArtifacts(template, {
|
||||||
|
cwd: projectRoot,
|
||||||
|
deps: { log: () => {} },
|
||||||
|
});
|
||||||
|
|
||||||
|
assert.deepEqual(writtenPaths, [
|
||||||
|
path.join(projectRoot, 'config.example.jsonc'),
|
||||||
|
path.join(docsRepoRoot, 'public', 'config.example.jsonc'),
|
||||||
|
]);
|
||||||
|
assert.equal(fs.readFileSync(path.join(projectRoot, 'config.example.jsonc'), 'utf8'), template);
|
||||||
|
assert.equal(
|
||||||
|
fs.readFileSync(path.join(docsRepoRoot, 'public', 'config.example.jsonc'), 'utf8'),
|
||||||
|
template,
|
||||||
|
);
|
||||||
|
} finally {
|
||||||
|
fs.rmSync(workspace, { recursive: true, force: true });
|
||||||
|
}
|
||||||
|
});
|
||||||
@@ -2,18 +2,62 @@ import * as fs from 'fs';
|
|||||||
import * as path from 'path';
|
import * as path from 'path';
|
||||||
import { DEFAULT_CONFIG, generateConfigTemplate } from './config';
|
import { DEFAULT_CONFIG, generateConfigTemplate } from './config';
|
||||||
|
|
||||||
function main(): void {
|
type ConfigExampleFsDeps = {
|
||||||
const template = generateConfigTemplate(DEFAULT_CONFIG);
|
existsSync?: (candidate: string) => boolean;
|
||||||
const outputPaths = [
|
mkdirSync?: (candidate: string, options: { recursive: true }) => void;
|
||||||
path.join(process.cwd(), 'config.example.jsonc'),
|
writeFileSync?: (candidate: string, content: string, encoding: BufferEncoding) => void;
|
||||||
path.join(process.cwd(), 'docs', 'public', 'config.example.jsonc'),
|
log?: (message: string) => void;
|
||||||
];
|
};
|
||||||
|
|
||||||
for (const outputPath of outputPaths) {
|
export function resolveConfigExampleOutputPaths(options?: {
|
||||||
fs.mkdirSync(path.dirname(outputPath), { recursive: true });
|
cwd?: string;
|
||||||
fs.writeFileSync(outputPath, template, 'utf-8');
|
docsRepoName?: string;
|
||||||
console.log(`Generated ${outputPath}`);
|
existsSync?: (candidate: string) => boolean;
|
||||||
|
}): string[] {
|
||||||
|
const cwd = options?.cwd ?? process.cwd();
|
||||||
|
const existsSync = options?.existsSync ?? fs.existsSync;
|
||||||
|
const docsRepoName = options?.docsRepoName ?? 'subminer-docs';
|
||||||
|
const outputPaths = [path.join(cwd, 'config.example.jsonc')];
|
||||||
|
const docsRepoRoot = path.resolve(cwd, '..', docsRepoName);
|
||||||
|
|
||||||
|
if (existsSync(docsRepoRoot)) {
|
||||||
|
outputPaths.push(path.join(docsRepoRoot, 'public', 'config.example.jsonc'));
|
||||||
}
|
}
|
||||||
|
|
||||||
|
return outputPaths;
|
||||||
}
|
}
|
||||||
|
|
||||||
main();
|
export function writeConfigExampleArtifacts(
|
||||||
|
template: string,
|
||||||
|
options?: {
|
||||||
|
cwd?: string;
|
||||||
|
docsRepoName?: string;
|
||||||
|
deps?: ConfigExampleFsDeps;
|
||||||
|
},
|
||||||
|
): string[] {
|
||||||
|
const mkdirSync = options?.deps?.mkdirSync ?? fs.mkdirSync;
|
||||||
|
const writeFileSync = options?.deps?.writeFileSync ?? fs.writeFileSync;
|
||||||
|
const log = options?.deps?.log ?? console.log;
|
||||||
|
const outputPaths = resolveConfigExampleOutputPaths({
|
||||||
|
cwd: options?.cwd,
|
||||||
|
docsRepoName: options?.docsRepoName,
|
||||||
|
existsSync: options?.deps?.existsSync,
|
||||||
|
});
|
||||||
|
|
||||||
|
for (const outputPath of outputPaths) {
|
||||||
|
mkdirSync(path.dirname(outputPath), { recursive: true });
|
||||||
|
writeFileSync(outputPath, template, 'utf-8');
|
||||||
|
log(`Generated ${outputPath}`);
|
||||||
|
}
|
||||||
|
|
||||||
|
return outputPaths;
|
||||||
|
}
|
||||||
|
|
||||||
|
function main(): void {
|
||||||
|
const template = generateConfigTemplate(DEFAULT_CONFIG);
|
||||||
|
writeConfigExampleArtifacts(template);
|
||||||
|
}
|
||||||
|
|
||||||
|
if (require.main === module) {
|
||||||
|
main();
|
||||||
|
}
|
||||||
|
|||||||
@@ -1,12 +1,35 @@
|
|||||||
import assert from 'node:assert/strict';
|
import assert from 'node:assert/strict';
|
||||||
import test from 'node:test';
|
import test from 'node:test';
|
||||||
import {
|
import {
|
||||||
|
normalizeStartupArgv,
|
||||||
sanitizeHelpEnv,
|
sanitizeHelpEnv,
|
||||||
|
sanitizeStartupEnv,
|
||||||
sanitizeBackgroundEnv,
|
sanitizeBackgroundEnv,
|
||||||
shouldDetachBackgroundLaunch,
|
shouldDetachBackgroundLaunch,
|
||||||
shouldHandleHelpOnlyAtEntry,
|
shouldHandleHelpOnlyAtEntry,
|
||||||
} from './main-entry-runtime';
|
} from './main-entry-runtime';
|
||||||
|
|
||||||
|
test('normalizeStartupArgv defaults no-arg startup to --start --background', () => {
|
||||||
|
assert.deepEqual(normalizeStartupArgv(['SubMiner.AppImage'], {}), [
|
||||||
|
'SubMiner.AppImage',
|
||||||
|
'--start',
|
||||||
|
'--background',
|
||||||
|
]);
|
||||||
|
assert.deepEqual(
|
||||||
|
normalizeStartupArgv(['SubMiner.AppImage', '--password-store', 'gnome-libsecret'], {}),
|
||||||
|
['SubMiner.AppImage', '--password-store', 'gnome-libsecret', '--start', '--background'],
|
||||||
|
);
|
||||||
|
assert.deepEqual(normalizeStartupArgv(['SubMiner.AppImage', '--background'], {}), [
|
||||||
|
'SubMiner.AppImage',
|
||||||
|
'--background',
|
||||||
|
'--start',
|
||||||
|
]);
|
||||||
|
assert.deepEqual(normalizeStartupArgv(['SubMiner.AppImage', '--help'], {}), [
|
||||||
|
'SubMiner.AppImage',
|
||||||
|
'--help',
|
||||||
|
]);
|
||||||
|
});
|
||||||
|
|
||||||
test('shouldHandleHelpOnlyAtEntry detects help-only invocation', () => {
|
test('shouldHandleHelpOnlyAtEntry detects help-only invocation', () => {
|
||||||
assert.equal(shouldHandleHelpOnlyAtEntry(['--help'], {}), true);
|
assert.equal(shouldHandleHelpOnlyAtEntry(['--help'], {}), true);
|
||||||
assert.equal(shouldHandleHelpOnlyAtEntry(['--help', '--start'], {}), false);
|
assert.equal(shouldHandleHelpOnlyAtEntry(['--help', '--start'], {}), false);
|
||||||
@@ -14,6 +37,14 @@ test('shouldHandleHelpOnlyAtEntry detects help-only invocation', () => {
|
|||||||
assert.equal(shouldHandleHelpOnlyAtEntry(['--help'], { ELECTRON_RUN_AS_NODE: '1' }), false);
|
assert.equal(shouldHandleHelpOnlyAtEntry(['--help'], { ELECTRON_RUN_AS_NODE: '1' }), false);
|
||||||
});
|
});
|
||||||
|
|
||||||
|
test('sanitizeStartupEnv suppresses warnings and lsfg layer', () => {
|
||||||
|
const env = sanitizeStartupEnv({
|
||||||
|
VK_INSTANCE_LAYERS: 'foo:lsfg-vk:bar',
|
||||||
|
});
|
||||||
|
assert.equal(env.NODE_NO_WARNINGS, '1');
|
||||||
|
assert.equal('VK_INSTANCE_LAYERS' in env, false);
|
||||||
|
});
|
||||||
|
|
||||||
test('sanitizeHelpEnv suppresses warnings and lsfg layer', () => {
|
test('sanitizeHelpEnv suppresses warnings and lsfg layer', () => {
|
||||||
const env = sanitizeHelpEnv({
|
const env = sanitizeHelpEnv({
|
||||||
VK_INSTANCE_LAYERS: 'foo:lsfg-vk:bar',
|
VK_INSTANCE_LAYERS: 'foo:lsfg-vk:bar',
|
||||||
|
|||||||
@@ -1,6 +1,8 @@
|
|||||||
import { CliArgs, parseArgs, shouldStartApp } from './cli/args';
|
import { CliArgs, parseArgs, shouldStartApp } from './cli/args';
|
||||||
|
|
||||||
const BACKGROUND_ARG = '--background';
|
const BACKGROUND_ARG = '--background';
|
||||||
|
const START_ARG = '--start';
|
||||||
|
const PASSWORD_STORE_ARG = '--password-store';
|
||||||
const BACKGROUND_CHILD_ENV = 'SUBMINER_BACKGROUND_CHILD';
|
const BACKGROUND_CHILD_ENV = 'SUBMINER_BACKGROUND_CHILD';
|
||||||
|
|
||||||
function removeLsfgLayer(env: NodeJS.ProcessEnv): void {
|
function removeLsfgLayer(env: NodeJS.ProcessEnv): void {
|
||||||
@@ -9,10 +11,54 @@ function removeLsfgLayer(env: NodeJS.ProcessEnv): void {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
function removePassiveStartupArgs(argv: string[]): string[] {
|
||||||
|
const filtered: string[] = [];
|
||||||
|
|
||||||
|
for (let i = 0; i < argv.length; i += 1) {
|
||||||
|
const arg = argv[i];
|
||||||
|
if (!arg) continue;
|
||||||
|
|
||||||
|
if (arg === PASSWORD_STORE_ARG) {
|
||||||
|
const value = argv[i + 1];
|
||||||
|
if (value && !value.startsWith('--')) {
|
||||||
|
i += 1;
|
||||||
|
}
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
|
||||||
|
if (arg.startsWith(`${PASSWORD_STORE_ARG}=`)) {
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
|
||||||
|
filtered.push(arg);
|
||||||
|
}
|
||||||
|
|
||||||
|
return filtered;
|
||||||
|
}
|
||||||
|
|
||||||
function parseCliArgs(argv: string[]): CliArgs {
|
function parseCliArgs(argv: string[]): CliArgs {
|
||||||
return parseArgs(argv);
|
return parseArgs(argv);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
export function normalizeStartupArgv(argv: string[], env: NodeJS.ProcessEnv): string[] {
|
||||||
|
if (env.ELECTRON_RUN_AS_NODE === '1') return argv;
|
||||||
|
|
||||||
|
const effectiveArgs = removePassiveStartupArgs(argv.slice(1));
|
||||||
|
if (effectiveArgs.length === 0) {
|
||||||
|
return [...argv, START_ARG, BACKGROUND_ARG];
|
||||||
|
}
|
||||||
|
|
||||||
|
if (
|
||||||
|
effectiveArgs.length === 1 &&
|
||||||
|
effectiveArgs[0] === BACKGROUND_ARG &&
|
||||||
|
!argv.includes(START_ARG)
|
||||||
|
) {
|
||||||
|
return [...argv, START_ARG];
|
||||||
|
}
|
||||||
|
|
||||||
|
return argv;
|
||||||
|
}
|
||||||
|
|
||||||
export function shouldDetachBackgroundLaunch(argv: string[], env: NodeJS.ProcessEnv): boolean {
|
export function shouldDetachBackgroundLaunch(argv: string[], env: NodeJS.ProcessEnv): boolean {
|
||||||
if (env.ELECTRON_RUN_AS_NODE === '1') return false;
|
if (env.ELECTRON_RUN_AS_NODE === '1') return false;
|
||||||
if (!argv.includes(BACKGROUND_ARG)) return false;
|
if (!argv.includes(BACKGROUND_ARG)) return false;
|
||||||
@@ -26,7 +72,7 @@ export function shouldHandleHelpOnlyAtEntry(argv: string[], env: NodeJS.ProcessE
|
|||||||
return args.help && !shouldStartApp(args);
|
return args.help && !shouldStartApp(args);
|
||||||
}
|
}
|
||||||
|
|
||||||
export function sanitizeHelpEnv(baseEnv: NodeJS.ProcessEnv): NodeJS.ProcessEnv {
|
export function sanitizeStartupEnv(baseEnv: NodeJS.ProcessEnv): NodeJS.ProcessEnv {
|
||||||
const env = { ...baseEnv };
|
const env = { ...baseEnv };
|
||||||
if (!env.NODE_NO_WARNINGS) {
|
if (!env.NODE_NO_WARNINGS) {
|
||||||
env.NODE_NO_WARNINGS = '1';
|
env.NODE_NO_WARNINGS = '1';
|
||||||
@@ -35,8 +81,12 @@ export function sanitizeHelpEnv(baseEnv: NodeJS.ProcessEnv): NodeJS.ProcessEnv {
|
|||||||
return env;
|
return env;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
export function sanitizeHelpEnv(baseEnv: NodeJS.ProcessEnv): NodeJS.ProcessEnv {
|
||||||
|
return sanitizeStartupEnv(baseEnv);
|
||||||
|
}
|
||||||
|
|
||||||
export function sanitizeBackgroundEnv(baseEnv: NodeJS.ProcessEnv): NodeJS.ProcessEnv {
|
export function sanitizeBackgroundEnv(baseEnv: NodeJS.ProcessEnv): NodeJS.ProcessEnv {
|
||||||
const env = sanitizeHelpEnv(baseEnv);
|
const env = sanitizeStartupEnv(baseEnv);
|
||||||
env[BACKGROUND_CHILD_ENV] = '1';
|
env[BACKGROUND_CHILD_ENV] = '1';
|
||||||
return env;
|
return env;
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -1,6 +1,8 @@
|
|||||||
import { spawn } from 'node:child_process';
|
import { spawn } from 'node:child_process';
|
||||||
import { printHelp } from './cli/help';
|
import { printHelp } from './cli/help';
|
||||||
import {
|
import {
|
||||||
|
normalizeStartupArgv,
|
||||||
|
sanitizeStartupEnv,
|
||||||
sanitizeBackgroundEnv,
|
sanitizeBackgroundEnv,
|
||||||
sanitizeHelpEnv,
|
sanitizeHelpEnv,
|
||||||
shouldDetachBackgroundLaunch,
|
shouldDetachBackgroundLaunch,
|
||||||
@@ -9,6 +11,21 @@ import {
|
|||||||
|
|
||||||
const DEFAULT_TEXTHOOKER_PORT = 5174;
|
const DEFAULT_TEXTHOOKER_PORT = 5174;
|
||||||
|
|
||||||
|
function applySanitizedEnv(sanitizedEnv: NodeJS.ProcessEnv): void {
|
||||||
|
if (sanitizedEnv.NODE_NO_WARNINGS) {
|
||||||
|
process.env.NODE_NO_WARNINGS = sanitizedEnv.NODE_NO_WARNINGS;
|
||||||
|
}
|
||||||
|
|
||||||
|
if (sanitizedEnv.VK_INSTANCE_LAYERS) {
|
||||||
|
process.env.VK_INSTANCE_LAYERS = sanitizedEnv.VK_INSTANCE_LAYERS;
|
||||||
|
} else {
|
||||||
|
delete process.env.VK_INSTANCE_LAYERS;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
process.argv = normalizeStartupArgv(process.argv, process.env);
|
||||||
|
applySanitizedEnv(sanitizeStartupEnv(process.env));
|
||||||
|
|
||||||
if (shouldDetachBackgroundLaunch(process.argv, process.env)) {
|
if (shouldDetachBackgroundLaunch(process.argv, process.env)) {
|
||||||
const child = spawn(process.execPath, process.argv.slice(1), {
|
const child = spawn(process.execPath, process.argv.slice(1), {
|
||||||
detached: true,
|
detached: true,
|
||||||
|
|||||||
243
src/main.ts
243
src/main.ts
@@ -239,6 +239,11 @@ import {
|
|||||||
resolveKeybindings,
|
resolveKeybindings,
|
||||||
showDesktopNotification,
|
showDesktopNotification,
|
||||||
} from './core/utils';
|
} from './core/utils';
|
||||||
|
import {
|
||||||
|
ensureDefaultConfigBootstrap,
|
||||||
|
getDefaultConfigFilePaths,
|
||||||
|
resolveDefaultMpvInstallPaths,
|
||||||
|
} from './shared/setup-state';
|
||||||
import {
|
import {
|
||||||
ImmersionTrackerService,
|
ImmersionTrackerService,
|
||||||
JellyfinRemoteSessionService,
|
JellyfinRemoteSessionService,
|
||||||
@@ -296,6 +301,21 @@ import {
|
|||||||
upsertYomitanDictionarySettings,
|
upsertYomitanDictionarySettings,
|
||||||
updateLastCardFromClipboard as updateLastCardFromClipboardCore,
|
updateLastCardFromClipboard as updateLastCardFromClipboardCore,
|
||||||
} from './core/services';
|
} from './core/services';
|
||||||
|
import {
|
||||||
|
createFirstRunSetupService,
|
||||||
|
shouldAutoOpenFirstRunSetup,
|
||||||
|
} from './main/runtime/first-run-setup-service';
|
||||||
|
import {
|
||||||
|
buildFirstRunSetupHtml,
|
||||||
|
createMaybeFocusExistingFirstRunSetupWindowHandler,
|
||||||
|
createOpenFirstRunSetupWindowHandler,
|
||||||
|
parseFirstRunSetupSubmissionUrl,
|
||||||
|
type FirstRunSetupAction,
|
||||||
|
} from './main/runtime/first-run-setup-window';
|
||||||
|
import {
|
||||||
|
detectInstalledFirstRunPlugin,
|
||||||
|
installFirstRunPluginToDefaultLocation,
|
||||||
|
} from './main/runtime/first-run-setup-plugin';
|
||||||
import { createImmersionTrackerStartupHandler } from './main/runtime/immersion-startup';
|
import { createImmersionTrackerStartupHandler } from './main/runtime/immersion-startup';
|
||||||
import { createBuildImmersionTrackerStartupMainDepsHandler } from './main/runtime/immersion-startup-main-deps';
|
import { createBuildImmersionTrackerStartupMainDepsHandler } from './main/runtime/immersion-startup-main-deps';
|
||||||
import { createAnilistUpdateQueue } from './core/services/anilist/anilist-update-queue';
|
import { createAnilistUpdateQueue } from './core/services/anilist/anilist-update-queue';
|
||||||
@@ -375,7 +395,7 @@ if (process.platform === 'linux') {
|
|||||||
getPasswordStoreArg(process.argv) ?? getDefaultPasswordStore(),
|
getPasswordStoreArg(process.argv) ?? getDefaultPasswordStore(),
|
||||||
);
|
);
|
||||||
app.commandLine.appendSwitch('password-store', passwordStore);
|
app.commandLine.appendSwitch('password-store', passwordStore);
|
||||||
console.debug(`[main] Applied --password-store ${passwordStore}`);
|
createLogger('main').debug(`Applied --password-store ${passwordStore}`);
|
||||||
}
|
}
|
||||||
|
|
||||||
app.setName('SubMiner');
|
app.setName('SubMiner');
|
||||||
@@ -495,6 +515,7 @@ const anilistUpdateQueue = createAnilistUpdateQueue(
|
|||||||
const isDev = process.argv.includes('--dev') || process.argv.includes('--debug');
|
const isDev = process.argv.includes('--dev') || process.argv.includes('--debug');
|
||||||
const texthookerService = new Texthooker();
|
const texthookerService = new Texthooker();
|
||||||
const subtitleWsService = new SubtitleWebSocket();
|
const subtitleWsService = new SubtitleWebSocket();
|
||||||
|
const annotationSubtitleWsService = new SubtitleWebSocket();
|
||||||
const logger = createLogger('main');
|
const logger = createLogger('main');
|
||||||
notifyAnilistTokenStoreWarning = (message: string) => {
|
notifyAnilistTokenStoreWarning = (message: string) => {
|
||||||
logger.warn(`[AniList] ${message}`);
|
logger.warn(`[AniList] ${message}`);
|
||||||
@@ -601,6 +622,41 @@ const appState = createAppState({
|
|||||||
mpvSocketPath: getDefaultSocketPath(),
|
mpvSocketPath: getDefaultSocketPath(),
|
||||||
texthookerPort: DEFAULT_TEXTHOOKER_PORT,
|
texthookerPort: DEFAULT_TEXTHOOKER_PORT,
|
||||||
});
|
});
|
||||||
|
let firstRunSetupMessage: string | null = null;
|
||||||
|
const firstRunSetupService = createFirstRunSetupService({
|
||||||
|
configDir: CONFIG_DIR,
|
||||||
|
getYomitanDictionaryCount: async () => {
|
||||||
|
await ensureYomitanExtensionLoaded();
|
||||||
|
const dictionaries = await getYomitanDictionaryInfo(getYomitanParserRuntimeDeps(), {
|
||||||
|
error: (message, ...args) => logger.error(message, ...args),
|
||||||
|
info: (message, ...args) => logger.info(message, ...args),
|
||||||
|
});
|
||||||
|
return dictionaries.length;
|
||||||
|
},
|
||||||
|
detectPluginInstalled: () => {
|
||||||
|
const installPaths = resolveDefaultMpvInstallPaths(
|
||||||
|
process.platform,
|
||||||
|
os.homedir(),
|
||||||
|
process.env.XDG_CONFIG_HOME,
|
||||||
|
);
|
||||||
|
return detectInstalledFirstRunPlugin(installPaths);
|
||||||
|
},
|
||||||
|
installPlugin: async () =>
|
||||||
|
installFirstRunPluginToDefaultLocation({
|
||||||
|
platform: process.platform,
|
||||||
|
homeDir: os.homedir(),
|
||||||
|
xdgConfigHome: process.env.XDG_CONFIG_HOME,
|
||||||
|
dirname: __dirname,
|
||||||
|
appPath: app.getAppPath(),
|
||||||
|
resourcesPath: process.resourcesPath,
|
||||||
|
}),
|
||||||
|
onStateChanged: (state) => {
|
||||||
|
appState.firstRunSetupCompleted = state.status === 'completed';
|
||||||
|
if (appTray) {
|
||||||
|
ensureTray();
|
||||||
|
}
|
||||||
|
},
|
||||||
|
});
|
||||||
const discordPresenceSessionStartedAtMs = Date.now();
|
const discordPresenceSessionStartedAtMs = Date.now();
|
||||||
let discordPresenceMediaDurationSec: number | null = null;
|
let discordPresenceMediaDurationSec: number | null = null;
|
||||||
|
|
||||||
@@ -890,6 +946,11 @@ const buildSubtitleProcessingControllerMainDepsHandler =
|
|||||||
topX: getResolvedConfig().subtitleStyle.frequencyDictionary.topX,
|
topX: getResolvedConfig().subtitleStyle.frequencyDictionary.topX,
|
||||||
mode: getResolvedConfig().subtitleStyle.frequencyDictionary.mode,
|
mode: getResolvedConfig().subtitleStyle.frequencyDictionary.mode,
|
||||||
});
|
});
|
||||||
|
annotationSubtitleWsService.broadcast(payload, {
|
||||||
|
enabled: getResolvedConfig().subtitleStyle.frequencyDictionary.enabled,
|
||||||
|
topX: getResolvedConfig().subtitleStyle.frequencyDictionary.topX,
|
||||||
|
mode: getResolvedConfig().subtitleStyle.frequencyDictionary.mode,
|
||||||
|
});
|
||||||
},
|
},
|
||||||
logDebug: (message) => {
|
logDebug: (message) => {
|
||||||
logger.debug(`[subtitle-processing] ${message}`);
|
logger.debug(`[subtitle-processing] ${message}`);
|
||||||
@@ -1147,6 +1208,8 @@ const characterDictionaryRuntime = createCharacterDictionaryRuntimeService({
|
|||||||
getCurrentMediaTitle: () => appState.currentMediaTitle,
|
getCurrentMediaTitle: () => appState.currentMediaTitle,
|
||||||
resolveMediaPathForJimaku: (mediaPath) => mediaRuntime.resolveMediaPathForJimaku(mediaPath),
|
resolveMediaPathForJimaku: (mediaPath) => mediaRuntime.resolveMediaPathForJimaku(mediaPath),
|
||||||
guessAnilistMediaInfo: (mediaPath, mediaTitle) => guessAnilistMediaInfo(mediaPath, mediaTitle),
|
guessAnilistMediaInfo: (mediaPath, mediaTitle) => guessAnilistMediaInfo(mediaPath, mediaTitle),
|
||||||
|
getCollapsibleSectionOpenState: (section) =>
|
||||||
|
getResolvedConfig().anilist.characterDictionary.collapsibleSections[section],
|
||||||
now: () => Date.now(),
|
now: () => Date.now(),
|
||||||
logInfo: (message) => logger.info(message),
|
logInfo: (message) => logger.info(message),
|
||||||
logWarn: (message) => logger.warn(message),
|
logWarn: (message) => logger.warn(message),
|
||||||
@@ -1594,6 +1657,95 @@ const {
|
|||||||
},
|
},
|
||||||
});
|
});
|
||||||
|
|
||||||
|
const maybeFocusExistingFirstRunSetupWindow = createMaybeFocusExistingFirstRunSetupWindowHandler({
|
||||||
|
getSetupWindow: () => appState.firstRunSetupWindow,
|
||||||
|
});
|
||||||
|
const openFirstRunSetupWindowHandler = createOpenFirstRunSetupWindowHandler({
|
||||||
|
maybeFocusExistingSetupWindow: maybeFocusExistingFirstRunSetupWindow,
|
||||||
|
createSetupWindow: () =>
|
||||||
|
new BrowserWindow({
|
||||||
|
width: 480,
|
||||||
|
height: 460,
|
||||||
|
title: 'SubMiner Setup',
|
||||||
|
show: true,
|
||||||
|
autoHideMenuBar: true,
|
||||||
|
resizable: false,
|
||||||
|
minimizable: false,
|
||||||
|
maximizable: false,
|
||||||
|
webPreferences: {
|
||||||
|
nodeIntegration: false,
|
||||||
|
contextIsolation: true,
|
||||||
|
},
|
||||||
|
}),
|
||||||
|
getSetupSnapshot: async () => {
|
||||||
|
const snapshot = await firstRunSetupService.getSetupStatus();
|
||||||
|
return {
|
||||||
|
configReady: snapshot.configReady,
|
||||||
|
dictionaryCount: snapshot.dictionaryCount,
|
||||||
|
canFinish: snapshot.canFinish,
|
||||||
|
pluginStatus: snapshot.pluginStatus,
|
||||||
|
pluginInstallPathSummary: snapshot.pluginInstallPathSummary,
|
||||||
|
message: firstRunSetupMessage,
|
||||||
|
};
|
||||||
|
},
|
||||||
|
buildSetupHtml: (model) => buildFirstRunSetupHtml(model),
|
||||||
|
parseSubmissionUrl: (rawUrl) => parseFirstRunSetupSubmissionUrl(rawUrl),
|
||||||
|
handleAction: async (action: FirstRunSetupAction) => {
|
||||||
|
if (action === 'install-plugin') {
|
||||||
|
const snapshot = await firstRunSetupService.installMpvPlugin();
|
||||||
|
firstRunSetupMessage = snapshot.message;
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
if (action === 'open-yomitan-settings') {
|
||||||
|
openYomitanSettings();
|
||||||
|
firstRunSetupMessage = 'Opened Yomitan settings. Install dictionaries, then refresh status.';
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
if (action === 'refresh') {
|
||||||
|
const snapshot = await firstRunSetupService.refreshStatus('Status refreshed.');
|
||||||
|
firstRunSetupMessage = snapshot.message;
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
if (action === 'skip-plugin') {
|
||||||
|
await firstRunSetupService.skipPluginInstall();
|
||||||
|
firstRunSetupMessage = 'mpv plugin installation skipped.';
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
const snapshot = await firstRunSetupService.markSetupCompleted();
|
||||||
|
if (snapshot.state.status === 'completed') {
|
||||||
|
firstRunSetupMessage = null;
|
||||||
|
return { closeWindow: true };
|
||||||
|
}
|
||||||
|
firstRunSetupMessage = 'Install at least one Yomitan dictionary before finishing setup.';
|
||||||
|
return;
|
||||||
|
},
|
||||||
|
markSetupInProgress: async () => {
|
||||||
|
firstRunSetupMessage = null;
|
||||||
|
await firstRunSetupService.markSetupInProgress();
|
||||||
|
},
|
||||||
|
markSetupCancelled: async () => {
|
||||||
|
firstRunSetupMessage = null;
|
||||||
|
await firstRunSetupService.markSetupCancelled();
|
||||||
|
},
|
||||||
|
isSetupCompleted: () => firstRunSetupService.isSetupCompleted(),
|
||||||
|
clearSetupWindow: () => {
|
||||||
|
appState.firstRunSetupWindow = null;
|
||||||
|
},
|
||||||
|
setSetupWindow: (window) => {
|
||||||
|
appState.firstRunSetupWindow = window as BrowserWindow;
|
||||||
|
},
|
||||||
|
encodeURIComponent: (value) => encodeURIComponent(value),
|
||||||
|
logError: (message, error) => logger.error(message, error),
|
||||||
|
});
|
||||||
|
|
||||||
|
function openFirstRunSetupWindow(): void {
|
||||||
|
if (firstRunSetupService.isSetupCompleted()) {
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
openFirstRunSetupWindowHandler();
|
||||||
|
}
|
||||||
|
|
||||||
const {
|
const {
|
||||||
notifyAnilistSetup,
|
notifyAnilistSetup,
|
||||||
consumeAnilistSetupTokenFromUrl,
|
consumeAnilistSetupTokenFromUrl,
|
||||||
@@ -1646,7 +1798,7 @@ const {
|
|||||||
appPath
|
appPath
|
||||||
? app.setAsDefaultProtocolClient(scheme, appPath, args)
|
? app.setAsDefaultProtocolClient(scheme, appPath, args)
|
||||||
: app.setAsDefaultProtocolClient(scheme),
|
: app.setAsDefaultProtocolClient(scheme),
|
||||||
logWarn: (message, details) => logger.warn(message, details),
|
logDebug: (message, details) => logger.debug(message, details),
|
||||||
},
|
},
|
||||||
});
|
});
|
||||||
|
|
||||||
@@ -2016,7 +2168,10 @@ const {
|
|||||||
restoreOverlayMpvSubtitles();
|
restoreOverlayMpvSubtitles();
|
||||||
},
|
},
|
||||||
unregisterAllGlobalShortcuts: () => globalShortcut.unregisterAll(),
|
unregisterAllGlobalShortcuts: () => globalShortcut.unregisterAll(),
|
||||||
stopSubtitleWebsocket: () => subtitleWsService.stop(),
|
stopSubtitleWebsocket: () => {
|
||||||
|
subtitleWsService.stop();
|
||||||
|
annotationSubtitleWsService.stop();
|
||||||
|
},
|
||||||
stopTexthookerService: () => texthookerService.stop(),
|
stopTexthookerService: () => texthookerService.stop(),
|
||||||
getYomitanParserWindow: () => appState.yomitanParserWindow,
|
getYomitanParserWindow: () => appState.yomitanParserWindow,
|
||||||
clearYomitanParserState: () => {
|
clearYomitanParserState: () => {
|
||||||
@@ -2121,6 +2276,13 @@ const { appReadyRuntimeRunner } = composeAppReadyRuntime({
|
|||||||
},
|
},
|
||||||
},
|
},
|
||||||
appReadyRuntimeMainDeps: {
|
appReadyRuntimeMainDeps: {
|
||||||
|
ensureDefaultConfigBootstrap: () => {
|
||||||
|
ensureDefaultConfigBootstrap({
|
||||||
|
configDir: CONFIG_DIR,
|
||||||
|
configFilePaths: getDefaultConfigFilePaths(CONFIG_DIR),
|
||||||
|
generateTemplate: () => generateConfigTemplate(DEFAULT_CONFIG),
|
||||||
|
});
|
||||||
|
},
|
||||||
loadSubtitlePosition: () => loadSubtitlePosition(),
|
loadSubtitlePosition: () => loadSubtitlePosition(),
|
||||||
resolveKeybindings: () => {
|
resolveKeybindings: () => {
|
||||||
appState.keybindings = resolveKeybindings(getResolvedConfig(), DEFAULT_KEYBINDINGS);
|
appState.keybindings = resolveKeybindings(getResolvedConfig(), DEFAULT_KEYBINDINGS);
|
||||||
@@ -2155,9 +2317,49 @@ const { appReadyRuntimeRunner } = composeAppReadyRuntime({
|
|||||||
},
|
},
|
||||||
defaultSecondarySubMode: 'hover',
|
defaultSecondarySubMode: 'hover',
|
||||||
defaultWebsocketPort: DEFAULT_CONFIG.websocket.port,
|
defaultWebsocketPort: DEFAULT_CONFIG.websocket.port,
|
||||||
|
defaultAnnotationWebsocketPort: DEFAULT_CONFIG.annotationWebsocket.port,
|
||||||
|
defaultTexthookerPort: DEFAULT_TEXTHOOKER_PORT,
|
||||||
hasMpvWebsocketPlugin: () => hasMpvWebsocketPlugin(),
|
hasMpvWebsocketPlugin: () => hasMpvWebsocketPlugin(),
|
||||||
startSubtitleWebsocket: (port: number) => {
|
startSubtitleWebsocket: (port: number) => {
|
||||||
subtitleWsService.start(port, () => appState.currentSubText);
|
subtitleWsService.start(
|
||||||
|
port,
|
||||||
|
() =>
|
||||||
|
appState.currentSubtitleData ??
|
||||||
|
(appState.currentSubText
|
||||||
|
? {
|
||||||
|
text: appState.currentSubText,
|
||||||
|
tokens: null,
|
||||||
|
}
|
||||||
|
: null),
|
||||||
|
() => ({
|
||||||
|
enabled: getResolvedConfig().subtitleStyle.frequencyDictionary.enabled,
|
||||||
|
topX: getResolvedConfig().subtitleStyle.frequencyDictionary.topX,
|
||||||
|
mode: getResolvedConfig().subtitleStyle.frequencyDictionary.mode,
|
||||||
|
}),
|
||||||
|
);
|
||||||
|
},
|
||||||
|
startAnnotationWebsocket: (port: number) => {
|
||||||
|
annotationSubtitleWsService.start(
|
||||||
|
port,
|
||||||
|
() =>
|
||||||
|
appState.currentSubtitleData ??
|
||||||
|
(appState.currentSubText
|
||||||
|
? {
|
||||||
|
text: appState.currentSubText,
|
||||||
|
tokens: null,
|
||||||
|
}
|
||||||
|
: null),
|
||||||
|
() => ({
|
||||||
|
enabled: getResolvedConfig().subtitleStyle.frequencyDictionary.enabled,
|
||||||
|
topX: getResolvedConfig().subtitleStyle.frequencyDictionary.topX,
|
||||||
|
mode: getResolvedConfig().subtitleStyle.frequencyDictionary.mode,
|
||||||
|
}),
|
||||||
|
);
|
||||||
|
},
|
||||||
|
startTexthooker: (port: number, websocketUrl?: string) => {
|
||||||
|
if (!texthookerService.isRunning()) {
|
||||||
|
texthookerService.start(port, websocketUrl);
|
||||||
|
}
|
||||||
},
|
},
|
||||||
log: (message) => appLogger.logInfo(message),
|
log: (message) => appLogger.logInfo(message),
|
||||||
createMecabTokenizerAndCheck: async () => {
|
createMecabTokenizerAndCheck: async () => {
|
||||||
@@ -2170,6 +2372,17 @@ const { appReadyRuntimeRunner } = composeAppReadyRuntime({
|
|||||||
loadYomitanExtension: async () => {
|
loadYomitanExtension: async () => {
|
||||||
await loadYomitanExtension();
|
await loadYomitanExtension();
|
||||||
},
|
},
|
||||||
|
handleFirstRunSetup: async () => {
|
||||||
|
const snapshot = await firstRunSetupService.ensureSetupStateInitialized();
|
||||||
|
appState.firstRunSetupCompleted = snapshot.state.status === 'completed';
|
||||||
|
if (
|
||||||
|
appState.initialArgs &&
|
||||||
|
shouldAutoOpenFirstRunSetup(appState.initialArgs) &&
|
||||||
|
snapshot.state.status !== 'completed'
|
||||||
|
) {
|
||||||
|
openFirstRunSetupWindow();
|
||||||
|
}
|
||||||
|
},
|
||||||
startJellyfinRemoteSession: async () => {
|
startJellyfinRemoteSession: async () => {
|
||||||
await startJellyfinRemoteSession();
|
await startJellyfinRemoteSession();
|
||||||
},
|
},
|
||||||
@@ -2190,7 +2403,9 @@ const { appReadyRuntimeRunner } = composeAppReadyRuntime({
|
|||||||
shouldSkipHeavyStartup: () =>
|
shouldSkipHeavyStartup: () =>
|
||||||
Boolean(
|
Boolean(
|
||||||
appState.initialArgs &&
|
appState.initialArgs &&
|
||||||
(shouldRunSettingsOnlyStartup(appState.initialArgs) || appState.initialArgs.dictionary),
|
(shouldRunSettingsOnlyStartup(appState.initialArgs) ||
|
||||||
|
appState.initialArgs.dictionary ||
|
||||||
|
appState.initialArgs.setup),
|
||||||
),
|
),
|
||||||
createImmersionTracker: () => {
|
createImmersionTracker: () => {
|
||||||
ensureImmersionTrackerStarted();
|
ensureImmersionTrackerStarted();
|
||||||
@@ -2203,12 +2418,11 @@ const { appReadyRuntimeRunner } = composeAppReadyRuntime({
|
|||||||
immersionTrackerStartupMainDeps,
|
immersionTrackerStartupMainDeps,
|
||||||
});
|
});
|
||||||
|
|
||||||
const { runAndApplyStartupState } =
|
const { runAndApplyStartupState } = runtimeRegistry.startup.createStartupRuntimeHandlers<
|
||||||
runtimeRegistry.startup.createStartupRuntimeHandlers<
|
|
||||||
CliArgs,
|
CliArgs,
|
||||||
StartupState,
|
StartupState,
|
||||||
ReturnType<typeof createStartupBootstrapRuntimeDeps>
|
ReturnType<typeof createStartupBootstrapRuntimeDeps>
|
||||||
>({
|
>({
|
||||||
appLifecycleRuntimeRunnerMainDeps: {
|
appLifecycleRuntimeRunnerMainDeps: {
|
||||||
app,
|
app,
|
||||||
platform: process.platform,
|
platform: process.platform,
|
||||||
@@ -2261,7 +2475,7 @@ const { runAndApplyStartupState } =
|
|||||||
createStartupBootstrapRuntimeDeps: (deps) => createStartupBootstrapRuntimeDeps(deps),
|
createStartupBootstrapRuntimeDeps: (deps) => createStartupBootstrapRuntimeDeps(deps),
|
||||||
runStartupBootstrapRuntime,
|
runStartupBootstrapRuntime,
|
||||||
applyStartupState: (startupState) => applyStartupState(appState, startupState),
|
applyStartupState: (startupState) => applyStartupState(appState, startupState),
|
||||||
});
|
});
|
||||||
|
|
||||||
runAndApplyStartupState();
|
runAndApplyStartupState();
|
||||||
if (isAnilistTrackingEnabled(getResolvedConfig())) {
|
if (isAnilistTrackingEnabled(getResolvedConfig())) {
|
||||||
@@ -2987,6 +3201,7 @@ const { registerIpcRuntimeHandlers } = composeIpcRuntimeHandlers({
|
|||||||
shiftSubtitleDelayToAdjacentCueHandler(direction),
|
shiftSubtitleDelayToAdjacentCueHandler(direction),
|
||||||
sendMpvCommand: (rawCommand: (string | number)[]) =>
|
sendMpvCommand: (rawCommand: (string | number)[]) =>
|
||||||
sendMpvCommandRuntime(appState.mpvClient, rawCommand),
|
sendMpvCommandRuntime(appState.mpvClient, rawCommand),
|
||||||
|
getMpvClient: () => appState.mpvClient,
|
||||||
isMpvConnected: () => Boolean(appState.mpvClient && appState.mpvClient.connected),
|
isMpvConnected: () => Boolean(appState.mpvClient && appState.mpvClient.connected),
|
||||||
hasRuntimeOptionsManager: () => appState.runtimeOptionsManager !== null,
|
hasRuntimeOptionsManager: () => appState.runtimeOptionsManager !== null,
|
||||||
},
|
},
|
||||||
@@ -3090,6 +3305,7 @@ const createCliCommandContextHandler = createCliCommandContextFactory({
|
|||||||
showMpvOsd: (text: string) => showMpvOsd(text),
|
showMpvOsd: (text: string) => showMpvOsd(text),
|
||||||
initializeOverlayRuntime: () => initializeOverlayRuntime(),
|
initializeOverlayRuntime: () => initializeOverlayRuntime(),
|
||||||
toggleVisibleOverlay: () => toggleVisibleOverlay(),
|
toggleVisibleOverlay: () => toggleVisibleOverlay(),
|
||||||
|
openFirstRunSetupWindow: () => openFirstRunSetupWindow(),
|
||||||
setVisibleOverlayVisible: (visible: boolean) => setVisibleOverlayVisible(visible),
|
setVisibleOverlayVisible: (visible: boolean) => setVisibleOverlayVisible(visible),
|
||||||
copyCurrentSubtitle: () => copyCurrentSubtitle(),
|
copyCurrentSubtitle: () => copyCurrentSubtitle(),
|
||||||
startPendingMultiCopy: (timeoutMs: number) => startPendingMultiCopy(timeoutMs),
|
startPendingMultiCopy: (timeoutMs: number) => startPendingMultiCopy(timeoutMs),
|
||||||
@@ -3129,7 +3345,8 @@ const { createMainWindow: createMainWindowHandler, createModalWindow: createModa
|
|||||||
isDev,
|
isDev,
|
||||||
ensureOverlayWindowLevel: (window) => ensureOverlayWindowLevel(window),
|
ensureOverlayWindowLevel: (window) => ensureOverlayWindowLevel(window),
|
||||||
onRuntimeOptionsChanged: () => broadcastRuntimeOptionsChanged(),
|
onRuntimeOptionsChanged: () => broadcastRuntimeOptionsChanged(),
|
||||||
setOverlayDebugVisualizationEnabled: (enabled) => setOverlayDebugVisualizationEnabled(enabled),
|
setOverlayDebugVisualizationEnabled: (enabled) =>
|
||||||
|
setOverlayDebugVisualizationEnabled(enabled),
|
||||||
isOverlayVisible: (windowKind) =>
|
isOverlayVisible: (windowKind) =>
|
||||||
windowKind === 'visible' ? overlayManager.getVisibleOverlayVisible() : false,
|
windowKind === 'visible' ? overlayManager.getVisibleOverlayVisible() : false,
|
||||||
tryHandleOverlayShortcutLocalFallback: (input) =>
|
tryHandleOverlayShortcutLocalFallback: (input) =>
|
||||||
@@ -3144,7 +3361,7 @@ const { createMainWindow: createMainWindowHandler, createModalWindow: createModa
|
|||||||
},
|
},
|
||||||
setMainWindow: (window) => overlayManager.setMainWindow(window),
|
setMainWindow: (window) => overlayManager.setMainWindow(window),
|
||||||
setModalWindow: (window) => overlayManager.setModalWindow(window),
|
setModalWindow: (window) => overlayManager.setModalWindow(window),
|
||||||
});
|
});
|
||||||
const { ensureTray: ensureTrayHandler, destroyTray: destroyTrayHandler } =
|
const { ensureTray: ensureTrayHandler, destroyTray: destroyTrayHandler } =
|
||||||
createTrayRuntimeHandlers({
|
createTrayRuntimeHandlers({
|
||||||
resolveTrayIconPathDeps: {
|
resolveTrayIconPathDeps: {
|
||||||
@@ -3161,6 +3378,8 @@ const { ensureTray: ensureTrayHandler, destroyTray: destroyTrayHandler } =
|
|||||||
initializeOverlayRuntime: () => initializeOverlayRuntime(),
|
initializeOverlayRuntime: () => initializeOverlayRuntime(),
|
||||||
isOverlayRuntimeInitialized: () => appState.overlayRuntimeInitialized,
|
isOverlayRuntimeInitialized: () => appState.overlayRuntimeInitialized,
|
||||||
setVisibleOverlayVisible: (visible) => setVisibleOverlayVisible(visible),
|
setVisibleOverlayVisible: (visible) => setVisibleOverlayVisible(visible),
|
||||||
|
showFirstRunSetup: () => !firstRunSetupService.isSetupCompleted(),
|
||||||
|
openFirstRunSetupWindow: () => openFirstRunSetupWindow(),
|
||||||
openYomitanSettings: () => openYomitanSettings(),
|
openYomitanSettings: () => openYomitanSettings(),
|
||||||
openRuntimeOptionsPalette: () => openRuntimeOptionsPalette(),
|
openRuntimeOptionsPalette: () => openRuntimeOptionsPalette(),
|
||||||
openJellyfinSetupWindow: () => openJellyfinSetupWindow(),
|
openJellyfinSetupWindow: () => openJellyfinSetupWindow(),
|
||||||
@@ -3189,7 +3408,7 @@ const { ensureTray: ensureTrayHandler, destroyTray: destroyTrayHandler } =
|
|||||||
},
|
},
|
||||||
},
|
},
|
||||||
buildMenuFromTemplate: (template) => Menu.buildFromTemplate(template),
|
buildMenuFromTemplate: (template) => Menu.buildFromTemplate(template),
|
||||||
});
|
});
|
||||||
const yomitanExtensionRuntime = createYomitanExtensionRuntime({
|
const yomitanExtensionRuntime = createYomitanExtensionRuntime({
|
||||||
loadYomitanExtensionCore,
|
loadYomitanExtensionCore,
|
||||||
userDataPath: USER_DATA_PATH,
|
userDataPath: USER_DATA_PATH,
|
||||||
|
|||||||
@@ -19,6 +19,7 @@ export interface AppLifecycleRuntimeDepsFactoryInput {
|
|||||||
}
|
}
|
||||||
|
|
||||||
export interface AppReadyRuntimeDepsFactoryInput {
|
export interface AppReadyRuntimeDepsFactoryInput {
|
||||||
|
ensureDefaultConfigBootstrap: AppReadyRuntimeDeps['ensureDefaultConfigBootstrap'];
|
||||||
loadSubtitlePosition: AppReadyRuntimeDeps['loadSubtitlePosition'];
|
loadSubtitlePosition: AppReadyRuntimeDeps['loadSubtitlePosition'];
|
||||||
resolveKeybindings: AppReadyRuntimeDeps['resolveKeybindings'];
|
resolveKeybindings: AppReadyRuntimeDeps['resolveKeybindings'];
|
||||||
createMpvClient: AppReadyRuntimeDeps['createMpvClient'];
|
createMpvClient: AppReadyRuntimeDeps['createMpvClient'];
|
||||||
@@ -30,8 +31,12 @@ export interface AppReadyRuntimeDepsFactoryInput {
|
|||||||
setSecondarySubMode: AppReadyRuntimeDeps['setSecondarySubMode'];
|
setSecondarySubMode: AppReadyRuntimeDeps['setSecondarySubMode'];
|
||||||
defaultSecondarySubMode: AppReadyRuntimeDeps['defaultSecondarySubMode'];
|
defaultSecondarySubMode: AppReadyRuntimeDeps['defaultSecondarySubMode'];
|
||||||
defaultWebsocketPort: AppReadyRuntimeDeps['defaultWebsocketPort'];
|
defaultWebsocketPort: AppReadyRuntimeDeps['defaultWebsocketPort'];
|
||||||
|
defaultAnnotationWebsocketPort: AppReadyRuntimeDeps['defaultAnnotationWebsocketPort'];
|
||||||
|
defaultTexthookerPort: AppReadyRuntimeDeps['defaultTexthookerPort'];
|
||||||
hasMpvWebsocketPlugin: AppReadyRuntimeDeps['hasMpvWebsocketPlugin'];
|
hasMpvWebsocketPlugin: AppReadyRuntimeDeps['hasMpvWebsocketPlugin'];
|
||||||
startSubtitleWebsocket: AppReadyRuntimeDeps['startSubtitleWebsocket'];
|
startSubtitleWebsocket: AppReadyRuntimeDeps['startSubtitleWebsocket'];
|
||||||
|
startAnnotationWebsocket: AppReadyRuntimeDeps['startAnnotationWebsocket'];
|
||||||
|
startTexthooker: AppReadyRuntimeDeps['startTexthooker'];
|
||||||
log: AppReadyRuntimeDeps['log'];
|
log: AppReadyRuntimeDeps['log'];
|
||||||
setLogLevel: AppReadyRuntimeDeps['setLogLevel'];
|
setLogLevel: AppReadyRuntimeDeps['setLogLevel'];
|
||||||
createMecabTokenizerAndCheck: AppReadyRuntimeDeps['createMecabTokenizerAndCheck'];
|
createMecabTokenizerAndCheck: AppReadyRuntimeDeps['createMecabTokenizerAndCheck'];
|
||||||
@@ -39,6 +44,7 @@ export interface AppReadyRuntimeDepsFactoryInput {
|
|||||||
createImmersionTracker?: AppReadyRuntimeDeps['createImmersionTracker'];
|
createImmersionTracker?: AppReadyRuntimeDeps['createImmersionTracker'];
|
||||||
startJellyfinRemoteSession?: AppReadyRuntimeDeps['startJellyfinRemoteSession'];
|
startJellyfinRemoteSession?: AppReadyRuntimeDeps['startJellyfinRemoteSession'];
|
||||||
loadYomitanExtension: AppReadyRuntimeDeps['loadYomitanExtension'];
|
loadYomitanExtension: AppReadyRuntimeDeps['loadYomitanExtension'];
|
||||||
|
handleFirstRunSetup: AppReadyRuntimeDeps['handleFirstRunSetup'];
|
||||||
prewarmSubtitleDictionaries?: AppReadyRuntimeDeps['prewarmSubtitleDictionaries'];
|
prewarmSubtitleDictionaries?: AppReadyRuntimeDeps['prewarmSubtitleDictionaries'];
|
||||||
startBackgroundWarmups: AppReadyRuntimeDeps['startBackgroundWarmups'];
|
startBackgroundWarmups: AppReadyRuntimeDeps['startBackgroundWarmups'];
|
||||||
texthookerOnlyMode: AppReadyRuntimeDeps['texthookerOnlyMode'];
|
texthookerOnlyMode: AppReadyRuntimeDeps['texthookerOnlyMode'];
|
||||||
@@ -75,6 +81,7 @@ export function createAppReadyRuntimeDeps(
|
|||||||
params: AppReadyRuntimeDepsFactoryInput,
|
params: AppReadyRuntimeDepsFactoryInput,
|
||||||
): AppReadyRuntimeDeps {
|
): AppReadyRuntimeDeps {
|
||||||
return {
|
return {
|
||||||
|
ensureDefaultConfigBootstrap: params.ensureDefaultConfigBootstrap,
|
||||||
loadSubtitlePosition: params.loadSubtitlePosition,
|
loadSubtitlePosition: params.loadSubtitlePosition,
|
||||||
resolveKeybindings: params.resolveKeybindings,
|
resolveKeybindings: params.resolveKeybindings,
|
||||||
createMpvClient: params.createMpvClient,
|
createMpvClient: params.createMpvClient,
|
||||||
@@ -86,8 +93,12 @@ export function createAppReadyRuntimeDeps(
|
|||||||
setSecondarySubMode: params.setSecondarySubMode,
|
setSecondarySubMode: params.setSecondarySubMode,
|
||||||
defaultSecondarySubMode: params.defaultSecondarySubMode,
|
defaultSecondarySubMode: params.defaultSecondarySubMode,
|
||||||
defaultWebsocketPort: params.defaultWebsocketPort,
|
defaultWebsocketPort: params.defaultWebsocketPort,
|
||||||
|
defaultAnnotationWebsocketPort: params.defaultAnnotationWebsocketPort,
|
||||||
|
defaultTexthookerPort: params.defaultTexthookerPort,
|
||||||
hasMpvWebsocketPlugin: params.hasMpvWebsocketPlugin,
|
hasMpvWebsocketPlugin: params.hasMpvWebsocketPlugin,
|
||||||
startSubtitleWebsocket: params.startSubtitleWebsocket,
|
startSubtitleWebsocket: params.startSubtitleWebsocket,
|
||||||
|
startAnnotationWebsocket: params.startAnnotationWebsocket,
|
||||||
|
startTexthooker: params.startTexthooker,
|
||||||
log: params.log,
|
log: params.log,
|
||||||
setLogLevel: params.setLogLevel,
|
setLogLevel: params.setLogLevel,
|
||||||
createMecabTokenizerAndCheck: params.createMecabTokenizerAndCheck,
|
createMecabTokenizerAndCheck: params.createMecabTokenizerAndCheck,
|
||||||
@@ -95,6 +106,7 @@ export function createAppReadyRuntimeDeps(
|
|||||||
createImmersionTracker: params.createImmersionTracker,
|
createImmersionTracker: params.createImmersionTracker,
|
||||||
startJellyfinRemoteSession: params.startJellyfinRemoteSession,
|
startJellyfinRemoteSession: params.startJellyfinRemoteSession,
|
||||||
loadYomitanExtension: params.loadYomitanExtension,
|
loadYomitanExtension: params.loadYomitanExtension,
|
||||||
|
handleFirstRunSetup: params.handleFirstRunSetup,
|
||||||
prewarmSubtitleDictionaries: params.prewarmSubtitleDictionaries,
|
prewarmSubtitleDictionaries: params.prewarmSubtitleDictionaries,
|
||||||
startBackgroundWarmups: params.startBackgroundWarmups,
|
startBackgroundWarmups: params.startBackgroundWarmups,
|
||||||
texthookerOnlyMode: params.texthookerOnlyMode,
|
texthookerOnlyMode: params.texthookerOnlyMode,
|
||||||
|
|||||||
@@ -220,8 +220,9 @@ test('generateForCurrentMedia emits structured-content glossary so image stays w
|
|||||||
(c as { tag?: string }).tag === 'details' &&
|
(c as { tag?: string }).tag === 'details' &&
|
||||||
Array.isArray((c as { content?: unknown[] }).content) &&
|
Array.isArray((c as { content?: unknown[] }).content) &&
|
||||||
(c as { content: Array<{ content?: string }> }).content[0]?.content === 'Description',
|
(c as { content: Array<{ content?: string }> }).content[0]?.content === 'Description',
|
||||||
) as { tag: string; content: Array<Record<string, unknown>> } | undefined;
|
) as { tag: string; open?: boolean; content: Array<Record<string, unknown>> } | undefined;
|
||||||
assert.ok(descSection, 'expected Description collapsible section');
|
assert.ok(descSection, 'expected Description collapsible section');
|
||||||
|
assert.equal(descSection.open, false);
|
||||||
const descBody = descSection.content[1] as { content: string };
|
const descBody = descSection.content[1] as { content: string };
|
||||||
assert.ok(
|
assert.ok(
|
||||||
descBody.content.includes('Alexia Midgar is the second princess of the Kingdom of Midgar.'),
|
descBody.content.includes('Alexia Midgar is the second princess of the Kingdom of Midgar.'),
|
||||||
@@ -233,11 +234,12 @@ test('generateForCurrentMedia emits structured-content glossary so image stays w
|
|||||||
Array.isArray((c as { content?: unknown[] }).content) &&
|
Array.isArray((c as { content?: unknown[] }).content) &&
|
||||||
(c as { content: Array<{ content?: string }> }).content[0]?.content ===
|
(c as { content: Array<{ content?: string }> }).content[0]?.content ===
|
||||||
'Character Information',
|
'Character Information',
|
||||||
) as { tag: string; content: Array<Record<string, unknown>> } | undefined;
|
) as { tag: string; open?: boolean; content: Array<Record<string, unknown>> } | undefined;
|
||||||
assert.ok(
|
assert.ok(
|
||||||
infoSection,
|
infoSection,
|
||||||
'expected Character Information collapsible section with parsed __Race:__ field',
|
'expected Character Information collapsible section with parsed __Race:__ field',
|
||||||
);
|
);
|
||||||
|
assert.equal(infoSection.open, false);
|
||||||
|
|
||||||
const topLevelImageGlossaryEntry = glossary.find(
|
const topLevelImageGlossaryEntry = glossary.find(
|
||||||
(item) =>
|
(item) =>
|
||||||
@@ -249,6 +251,330 @@ test('generateForCurrentMedia emits structured-content glossary so image stays w
|
|||||||
}
|
}
|
||||||
});
|
});
|
||||||
|
|
||||||
|
test('generateForCurrentMedia applies configured open states to character dictionary sections', async () => {
|
||||||
|
const userDataPath = makeTempDir();
|
||||||
|
const originalFetch = globalThis.fetch;
|
||||||
|
|
||||||
|
globalThis.fetch = (async (input: string | URL | Request, init?: RequestInit) => {
|
||||||
|
const url = typeof input === 'string' ? input : input instanceof URL ? input.href : input.url;
|
||||||
|
if (url === GRAPHQL_URL) {
|
||||||
|
const body = JSON.parse(String(init?.body ?? '{}')) as {
|
||||||
|
query?: string;
|
||||||
|
};
|
||||||
|
|
||||||
|
if (body.query?.includes('Page(perPage: 10)')) {
|
||||||
|
return new Response(
|
||||||
|
JSON.stringify({
|
||||||
|
data: {
|
||||||
|
Page: {
|
||||||
|
media: [
|
||||||
|
{
|
||||||
|
id: 130298,
|
||||||
|
episodes: 20,
|
||||||
|
title: {
|
||||||
|
romaji: 'The Eminence in Shadow',
|
||||||
|
english: 'The Eminence in Shadow',
|
||||||
|
native: '陰の実力者になりたくて!',
|
||||||
|
},
|
||||||
|
},
|
||||||
|
],
|
||||||
|
},
|
||||||
|
},
|
||||||
|
}),
|
||||||
|
{ status: 200, headers: { 'content-type': 'application/json' } },
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
if (body.query?.includes('characters(page: $page')) {
|
||||||
|
return new Response(
|
||||||
|
JSON.stringify({
|
||||||
|
data: {
|
||||||
|
Media: {
|
||||||
|
title: {
|
||||||
|
romaji: 'The Eminence in Shadow',
|
||||||
|
english: 'The Eminence in Shadow',
|
||||||
|
native: '陰の実力者になりたくて!',
|
||||||
|
},
|
||||||
|
characters: {
|
||||||
|
pageInfo: { hasNextPage: false },
|
||||||
|
edges: [
|
||||||
|
{
|
||||||
|
role: 'SUPPORTING',
|
||||||
|
voiceActors: [
|
||||||
|
{
|
||||||
|
id: 456,
|
||||||
|
name: {
|
||||||
|
full: 'Rina Hidaka',
|
||||||
|
native: '日高里菜',
|
||||||
|
},
|
||||||
|
image: {
|
||||||
|
medium: 'https://cdn.example.com/va-456.jpg',
|
||||||
|
},
|
||||||
|
},
|
||||||
|
],
|
||||||
|
node: {
|
||||||
|
id: 123,
|
||||||
|
description:
|
||||||
|
'Alexia Midgar is the second princess of the Kingdom of Midgar.\n\n__Race:__ Human',
|
||||||
|
image: {
|
||||||
|
large: 'https://cdn.example.com/character-123.png',
|
||||||
|
medium: 'https://cdn.example.com/character-123-small.png',
|
||||||
|
},
|
||||||
|
name: {
|
||||||
|
full: 'Alexia Midgar',
|
||||||
|
native: 'アレクシア・ミドガル',
|
||||||
|
},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
],
|
||||||
|
},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
}),
|
||||||
|
{ status: 200, headers: { 'content-type': 'application/json' } },
|
||||||
|
);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if (url === 'https://cdn.example.com/character-123.png') {
|
||||||
|
return new Response(Buffer.from([0x89, 0x50, 0x4e, 0x47]), {
|
||||||
|
status: 200,
|
||||||
|
headers: { 'content-type': 'image/png' },
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
if (url === 'https://cdn.example.com/va-456.jpg') {
|
||||||
|
return new Response(Buffer.from([0xff, 0xd8, 0xff, 0xd9]), {
|
||||||
|
status: 200,
|
||||||
|
headers: { 'content-type': 'image/jpeg' },
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
throw new Error(`Unexpected fetch: ${url}`);
|
||||||
|
}) as typeof globalThis.fetch;
|
||||||
|
|
||||||
|
try {
|
||||||
|
const runtime = createCharacterDictionaryRuntimeService({
|
||||||
|
userDataPath,
|
||||||
|
getCurrentMediaPath: () => '/tmp/eminence-s01e05.mkv',
|
||||||
|
getCurrentMediaTitle: () => 'The Eminence in Shadow - S01E05',
|
||||||
|
resolveMediaPathForJimaku: (mediaPath) => mediaPath,
|
||||||
|
guessAnilistMediaInfo: async () => ({
|
||||||
|
title: 'The Eminence in Shadow',
|
||||||
|
episode: 5,
|
||||||
|
source: 'fallback',
|
||||||
|
}),
|
||||||
|
getCollapsibleSectionOpenState: (section) =>
|
||||||
|
section === 'description' || section === 'voicedBy',
|
||||||
|
now: () => 1_700_000_000_000,
|
||||||
|
});
|
||||||
|
|
||||||
|
const result = await runtime.generateForCurrentMedia();
|
||||||
|
const termBank = JSON.parse(
|
||||||
|
readStoredZipEntry(result.zipPath, 'term_bank_1.json').toString('utf8'),
|
||||||
|
) as Array<
|
||||||
|
[
|
||||||
|
string,
|
||||||
|
string,
|
||||||
|
string,
|
||||||
|
string,
|
||||||
|
number,
|
||||||
|
Array<string | Record<string, unknown>>,
|
||||||
|
number,
|
||||||
|
string,
|
||||||
|
]
|
||||||
|
>;
|
||||||
|
const alexia = termBank.find(([term]) => term === 'アレクシア');
|
||||||
|
assert.ok(alexia);
|
||||||
|
|
||||||
|
const glossary = alexia[5];
|
||||||
|
const entry = glossary[0] as {
|
||||||
|
type: string;
|
||||||
|
content: { tag: string; content: Array<Record<string, unknown>> };
|
||||||
|
};
|
||||||
|
const children = entry.content.content;
|
||||||
|
|
||||||
|
const getSection = (title: string) =>
|
||||||
|
children.find(
|
||||||
|
(c) =>
|
||||||
|
(c as { tag?: string }).tag === 'details' &&
|
||||||
|
Array.isArray((c as { content?: unknown[] }).content) &&
|
||||||
|
(c as { content: Array<{ content?: string }> }).content[0]?.content === title,
|
||||||
|
) as { open?: boolean } | undefined;
|
||||||
|
|
||||||
|
assert.equal(getSection('Description')?.open, true);
|
||||||
|
assert.equal(getSection('Character Information')?.open, false);
|
||||||
|
assert.equal(getSection('Voiced by')?.open, true);
|
||||||
|
} finally {
|
||||||
|
globalThis.fetch = originalFetch;
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
|
test('generateForCurrentMedia reapplies collapsible open states when using cached snapshot data', async () => {
|
||||||
|
const userDataPath = makeTempDir();
|
||||||
|
const originalFetch = globalThis.fetch;
|
||||||
|
|
||||||
|
globalThis.fetch = (async (input: string | URL | Request, init?: RequestInit) => {
|
||||||
|
const url = typeof input === 'string' ? input : input instanceof URL ? input.href : input.url;
|
||||||
|
if (url === GRAPHQL_URL) {
|
||||||
|
const body = JSON.parse(String(init?.body ?? '{}')) as {
|
||||||
|
query?: string;
|
||||||
|
};
|
||||||
|
|
||||||
|
if (body.query?.includes('Page(perPage: 10)')) {
|
||||||
|
return new Response(
|
||||||
|
JSON.stringify({
|
||||||
|
data: {
|
||||||
|
Page: {
|
||||||
|
media: [
|
||||||
|
{
|
||||||
|
id: 130298,
|
||||||
|
episodes: 20,
|
||||||
|
title: {
|
||||||
|
romaji: 'The Eminence in Shadow',
|
||||||
|
english: 'The Eminence in Shadow',
|
||||||
|
native: '陰の実力者になりたくて!',
|
||||||
|
},
|
||||||
|
},
|
||||||
|
],
|
||||||
|
},
|
||||||
|
},
|
||||||
|
}),
|
||||||
|
{ status: 200, headers: { 'content-type': 'application/json' } },
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
if (body.query?.includes('characters(page: $page')) {
|
||||||
|
return new Response(
|
||||||
|
JSON.stringify({
|
||||||
|
data: {
|
||||||
|
Media: {
|
||||||
|
title: {
|
||||||
|
english: 'The Eminence in Shadow',
|
||||||
|
},
|
||||||
|
characters: {
|
||||||
|
pageInfo: { hasNextPage: false },
|
||||||
|
edges: [
|
||||||
|
{
|
||||||
|
role: 'SUPPORTING',
|
||||||
|
voiceActors: [
|
||||||
|
{
|
||||||
|
id: 456,
|
||||||
|
name: {
|
||||||
|
full: 'Rina Hidaka',
|
||||||
|
native: '日高里菜',
|
||||||
|
},
|
||||||
|
image: {
|
||||||
|
medium: 'https://cdn.example.com/va-456.jpg',
|
||||||
|
},
|
||||||
|
},
|
||||||
|
],
|
||||||
|
node: {
|
||||||
|
id: 123,
|
||||||
|
description:
|
||||||
|
'Alexia Midgar is the second princess of the Kingdom of Midgar.\n\n__Race:__ Human',
|
||||||
|
image: {
|
||||||
|
large: 'https://cdn.example.com/character-123.png',
|
||||||
|
medium: null,
|
||||||
|
},
|
||||||
|
name: {
|
||||||
|
full: 'Alexia Midgar',
|
||||||
|
native: 'アレクシア・ミドガル',
|
||||||
|
},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
],
|
||||||
|
},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
}),
|
||||||
|
{ status: 200, headers: { 'content-type': 'application/json' } },
|
||||||
|
);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if (url === 'https://cdn.example.com/character-123.png') {
|
||||||
|
return new Response(PNG_1X1, {
|
||||||
|
status: 200,
|
||||||
|
headers: { 'content-type': 'image/png' },
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
if (url === 'https://cdn.example.com/va-456.jpg') {
|
||||||
|
return new Response(PNG_1X1, {
|
||||||
|
status: 200,
|
||||||
|
headers: { 'content-type': 'image/png' },
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
throw new Error(`Unexpected fetch: ${url}`);
|
||||||
|
}) as typeof globalThis.fetch;
|
||||||
|
|
||||||
|
try {
|
||||||
|
const runtimeOpen = createCharacterDictionaryRuntimeService({
|
||||||
|
userDataPath,
|
||||||
|
getCurrentMediaPath: () => '/tmp/eminence-s01e05.mkv',
|
||||||
|
getCurrentMediaTitle: () => 'The Eminence in Shadow - S01E05',
|
||||||
|
resolveMediaPathForJimaku: (mediaPath) => mediaPath,
|
||||||
|
guessAnilistMediaInfo: async () => ({
|
||||||
|
title: 'The Eminence in Shadow',
|
||||||
|
episode: 5,
|
||||||
|
source: 'fallback',
|
||||||
|
}),
|
||||||
|
getCollapsibleSectionOpenState: () => true,
|
||||||
|
now: () => 1_700_000_000_000,
|
||||||
|
});
|
||||||
|
await runtimeOpen.generateForCurrentMedia();
|
||||||
|
|
||||||
|
const runtimeClosed = createCharacterDictionaryRuntimeService({
|
||||||
|
userDataPath,
|
||||||
|
getCurrentMediaPath: () => '/tmp/eminence-s01e05.mkv',
|
||||||
|
getCurrentMediaTitle: () => 'The Eminence in Shadow - S01E05',
|
||||||
|
resolveMediaPathForJimaku: (mediaPath) => mediaPath,
|
||||||
|
guessAnilistMediaInfo: async () => ({
|
||||||
|
title: 'The Eminence in Shadow',
|
||||||
|
episode: 5,
|
||||||
|
source: 'fallback',
|
||||||
|
}),
|
||||||
|
getCollapsibleSectionOpenState: () => false,
|
||||||
|
now: () => 1_700_000_000_500,
|
||||||
|
});
|
||||||
|
const result = await runtimeClosed.generateForCurrentMedia();
|
||||||
|
|
||||||
|
const termBank = JSON.parse(
|
||||||
|
readStoredZipEntry(result.zipPath, 'term_bank_1.json').toString('utf8'),
|
||||||
|
) as Array<
|
||||||
|
[
|
||||||
|
string,
|
||||||
|
string,
|
||||||
|
string,
|
||||||
|
string,
|
||||||
|
number,
|
||||||
|
Array<string | Record<string, unknown>>,
|
||||||
|
number,
|
||||||
|
string,
|
||||||
|
]
|
||||||
|
>;
|
||||||
|
const alexia = termBank.find(([term]) => term === 'アレクシア');
|
||||||
|
assert.ok(alexia);
|
||||||
|
|
||||||
|
const children = (
|
||||||
|
alexia[5][0] as {
|
||||||
|
content: { content: Array<Record<string, unknown>> };
|
||||||
|
}
|
||||||
|
).content.content;
|
||||||
|
const sections = children.filter(
|
||||||
|
(item) => (item as { tag?: string }).tag === 'details',
|
||||||
|
) as Array<{
|
||||||
|
open?: boolean;
|
||||||
|
}>;
|
||||||
|
assert.ok(sections.length >= 2);
|
||||||
|
assert.ok(sections.every((section) => section.open === false));
|
||||||
|
} finally {
|
||||||
|
globalThis.fetch = originalFetch;
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
test('generateForCurrentMedia adds kana aliases for romanized names when native name is kanji', async () => {
|
test('generateForCurrentMedia adds kana aliases for romanized names when native name is kanji', async () => {
|
||||||
const userDataPath = makeTempDir();
|
const userDataPath = makeTempDir();
|
||||||
const originalFetch = globalThis.fetch;
|
const originalFetch = globalThis.fetch;
|
||||||
@@ -369,6 +695,123 @@ test('generateForCurrentMedia adds kana aliases for romanized names when native
|
|||||||
}
|
}
|
||||||
});
|
});
|
||||||
|
|
||||||
|
test('generateForCurrentMedia indexes AniList alternative character names for alias lookups', async () => {
|
||||||
|
const userDataPath = makeTempDir();
|
||||||
|
const originalFetch = globalThis.fetch;
|
||||||
|
|
||||||
|
globalThis.fetch = (async (input: string | URL | Request, init?: RequestInit) => {
|
||||||
|
const url = typeof input === 'string' ? input : input instanceof URL ? input.href : input.url;
|
||||||
|
if (url === GRAPHQL_URL) {
|
||||||
|
const body = JSON.parse(String(init?.body ?? '{}')) as {
|
||||||
|
query?: string;
|
||||||
|
};
|
||||||
|
|
||||||
|
if (body.query?.includes('Page(perPage: 10)')) {
|
||||||
|
return new Response(
|
||||||
|
JSON.stringify({
|
||||||
|
data: {
|
||||||
|
Page: {
|
||||||
|
media: [
|
||||||
|
{
|
||||||
|
id: 130298,
|
||||||
|
episodes: 20,
|
||||||
|
title: {
|
||||||
|
romaji: 'Kage no Jitsuryokusha ni Naritakute!',
|
||||||
|
english: 'The Eminence in Shadow',
|
||||||
|
native: '陰の実力者になりたくて!',
|
||||||
|
},
|
||||||
|
},
|
||||||
|
],
|
||||||
|
},
|
||||||
|
},
|
||||||
|
}),
|
||||||
|
{
|
||||||
|
status: 200,
|
||||||
|
headers: { 'content-type': 'application/json' },
|
||||||
|
},
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
if (body.query?.includes('characters(page: $page')) {
|
||||||
|
return new Response(
|
||||||
|
JSON.stringify({
|
||||||
|
data: {
|
||||||
|
Media: {
|
||||||
|
title: {
|
||||||
|
romaji: 'Kage no Jitsuryokusha ni Naritakute!',
|
||||||
|
english: 'The Eminence in Shadow',
|
||||||
|
native: '陰の実力者になりたくて!',
|
||||||
|
},
|
||||||
|
characters: {
|
||||||
|
pageInfo: { hasNextPage: false },
|
||||||
|
edges: [
|
||||||
|
{
|
||||||
|
role: 'MAIN',
|
||||||
|
node: {
|
||||||
|
id: 321,
|
||||||
|
description: 'Leader of Shadow Garden.',
|
||||||
|
image: null,
|
||||||
|
name: {
|
||||||
|
full: 'Cid Kagenou',
|
||||||
|
native: 'シド・カゲノー',
|
||||||
|
alternative: ['Shadow', 'Minoru Kagenou'],
|
||||||
|
},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
],
|
||||||
|
},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
}),
|
||||||
|
{
|
||||||
|
status: 200,
|
||||||
|
headers: { 'content-type': 'application/json' },
|
||||||
|
},
|
||||||
|
);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
throw new Error(`Unexpected fetch URL: ${url}`);
|
||||||
|
}) as typeof globalThis.fetch;
|
||||||
|
|
||||||
|
try {
|
||||||
|
const runtime = createCharacterDictionaryRuntimeService({
|
||||||
|
userDataPath,
|
||||||
|
getCurrentMediaPath: () => '/tmp/eminence-s01e05.mkv',
|
||||||
|
getCurrentMediaTitle: () => 'The Eminence in Shadow - S01E05',
|
||||||
|
resolveMediaPathForJimaku: (mediaPath) => mediaPath,
|
||||||
|
guessAnilistMediaInfo: async () => ({
|
||||||
|
title: 'The Eminence in Shadow',
|
||||||
|
episode: 5,
|
||||||
|
source: 'fallback',
|
||||||
|
}),
|
||||||
|
now: () => 1_700_000_000_000,
|
||||||
|
});
|
||||||
|
|
||||||
|
const result = await runtime.generateForCurrentMedia();
|
||||||
|
const termBank = JSON.parse(
|
||||||
|
readStoredZipEntry(result.zipPath, 'term_bank_1.json').toString('utf8'),
|
||||||
|
) as Array<
|
||||||
|
[
|
||||||
|
string,
|
||||||
|
string,
|
||||||
|
string,
|
||||||
|
string,
|
||||||
|
number,
|
||||||
|
Array<string | Record<string, unknown>>,
|
||||||
|
number,
|
||||||
|
string,
|
||||||
|
]
|
||||||
|
>;
|
||||||
|
|
||||||
|
const shadowKana = termBank.find(([term]) => term === 'シャドウ');
|
||||||
|
assert.ok(shadowKana, 'expected katakana alias from AniList alternative name');
|
||||||
|
assert.equal(shadowKana[1], 'しゃどう');
|
||||||
|
} finally {
|
||||||
|
globalThis.fetch = originalFetch;
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
test('getOrCreateCurrentSnapshot persists and reuses normalized snapshot data', async () => {
|
test('getOrCreateCurrentSnapshot persists and reuses normalized snapshot data', async () => {
|
||||||
const userDataPath = makeTempDir();
|
const userDataPath = makeTempDir();
|
||||||
const originalFetch = globalThis.fetch;
|
const originalFetch = globalThis.fetch;
|
||||||
@@ -1158,6 +1601,308 @@ test('buildMergedDictionary combines stored snapshots into one stable dictionary
|
|||||||
}
|
}
|
||||||
});
|
});
|
||||||
|
|
||||||
|
test('buildMergedDictionary rebuilds snapshots written with an older format version', async () => {
|
||||||
|
const userDataPath = makeTempDir();
|
||||||
|
const originalFetch = globalThis.fetch;
|
||||||
|
let characterQueryCount = 0;
|
||||||
|
|
||||||
|
globalThis.fetch = (async (input: string | URL | Request, init?: RequestInit) => {
|
||||||
|
const url = typeof input === 'string' ? input : input instanceof URL ? input.href : input.url;
|
||||||
|
if (url === GRAPHQL_URL) {
|
||||||
|
const body = JSON.parse(String(init?.body ?? '{}')) as {
|
||||||
|
query?: string;
|
||||||
|
variables?: Record<string, unknown>;
|
||||||
|
};
|
||||||
|
|
||||||
|
if (body.query?.includes('characters(page: $page')) {
|
||||||
|
characterQueryCount += 1;
|
||||||
|
assert.equal(body.variables?.id, 130298);
|
||||||
|
return new Response(
|
||||||
|
JSON.stringify({
|
||||||
|
data: {
|
||||||
|
Media: {
|
||||||
|
title: {
|
||||||
|
english: 'The Eminence in Shadow',
|
||||||
|
},
|
||||||
|
characters: {
|
||||||
|
pageInfo: { hasNextPage: false },
|
||||||
|
edges: [
|
||||||
|
{
|
||||||
|
role: 'MAIN',
|
||||||
|
node: {
|
||||||
|
id: 111,
|
||||||
|
description: 'Leader of Shadow Garden.',
|
||||||
|
image: null,
|
||||||
|
name: {
|
||||||
|
full: 'Cid Kagenou',
|
||||||
|
native: 'シド・カゲノー',
|
||||||
|
alternative: ['Shadow'],
|
||||||
|
},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
],
|
||||||
|
},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
}),
|
||||||
|
{ status: 200, headers: { 'content-type': 'application/json' } },
|
||||||
|
);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
throw new Error(`Unexpected fetch URL: ${url}`);
|
||||||
|
}) as typeof globalThis.fetch;
|
||||||
|
|
||||||
|
try {
|
||||||
|
const snapshotsDir = path.join(userDataPath, 'character-dictionaries', 'snapshots');
|
||||||
|
fs.mkdirSync(snapshotsDir, { recursive: true });
|
||||||
|
fs.writeFileSync(
|
||||||
|
path.join(snapshotsDir, 'anilist-130298.json'),
|
||||||
|
JSON.stringify({
|
||||||
|
formatVersion: 12,
|
||||||
|
mediaId: 130298,
|
||||||
|
mediaTitle: 'The Eminence in Shadow',
|
||||||
|
entryCount: 1,
|
||||||
|
updatedAt: 1_700_000_000_000,
|
||||||
|
termEntries: [['stale', '', 'name main', '', 100, ['stale'], 0, '']],
|
||||||
|
images: [],
|
||||||
|
}),
|
||||||
|
'utf8',
|
||||||
|
);
|
||||||
|
|
||||||
|
const runtime = createCharacterDictionaryRuntimeService({
|
||||||
|
userDataPath,
|
||||||
|
getCurrentMediaPath: () => null,
|
||||||
|
getCurrentMediaTitle: () => null,
|
||||||
|
resolveMediaPathForJimaku: (mediaPath) => mediaPath,
|
||||||
|
guessAnilistMediaInfo: async () => null,
|
||||||
|
now: () => 1_700_000_000_100,
|
||||||
|
});
|
||||||
|
|
||||||
|
const merged = await runtime.buildMergedDictionary([130298]);
|
||||||
|
const termBank = JSON.parse(
|
||||||
|
readStoredZipEntry(merged.zipPath, 'term_bank_1.json').toString('utf8'),
|
||||||
|
) as Array<
|
||||||
|
[
|
||||||
|
string,
|
||||||
|
string,
|
||||||
|
string,
|
||||||
|
string,
|
||||||
|
number,
|
||||||
|
Array<string | Record<string, unknown>>,
|
||||||
|
number,
|
||||||
|
string,
|
||||||
|
]
|
||||||
|
>;
|
||||||
|
|
||||||
|
assert.equal(characterQueryCount, 1);
|
||||||
|
assert.ok(termBank.find(([term]) => term === 'シャドウ'));
|
||||||
|
} finally {
|
||||||
|
globalThis.fetch = originalFetch;
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
|
test('buildMergedDictionary reapplies collapsible open states from current config', async () => {
|
||||||
|
const userDataPath = makeTempDir();
|
||||||
|
const originalFetch = globalThis.fetch;
|
||||||
|
const current = { title: 'The Eminence in Shadow', episode: 5 };
|
||||||
|
|
||||||
|
globalThis.fetch = (async (input: string | URL | Request, init?: RequestInit) => {
|
||||||
|
const url = typeof input === 'string' ? input : input instanceof URL ? input.href : input.url;
|
||||||
|
if (url === GRAPHQL_URL) {
|
||||||
|
const body = JSON.parse(String(init?.body ?? '{}')) as {
|
||||||
|
query?: string;
|
||||||
|
variables?: Record<string, unknown>;
|
||||||
|
};
|
||||||
|
|
||||||
|
if (body.query?.includes('Page(perPage: 10)')) {
|
||||||
|
if (body.variables?.search === 'The Eminence in Shadow') {
|
||||||
|
return new Response(
|
||||||
|
JSON.stringify({
|
||||||
|
data: {
|
||||||
|
Page: {
|
||||||
|
media: [
|
||||||
|
{
|
||||||
|
id: 130298,
|
||||||
|
episodes: 20,
|
||||||
|
title: {
|
||||||
|
english: 'The Eminence in Shadow',
|
||||||
|
},
|
||||||
|
},
|
||||||
|
],
|
||||||
|
},
|
||||||
|
},
|
||||||
|
}),
|
||||||
|
{ status: 200, headers: { 'content-type': 'application/json' } },
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
return new Response(
|
||||||
|
JSON.stringify({
|
||||||
|
data: {
|
||||||
|
Page: {
|
||||||
|
media: [
|
||||||
|
{
|
||||||
|
id: 21,
|
||||||
|
episodes: 28,
|
||||||
|
title: {
|
||||||
|
english: 'Frieren: Beyond Journey’s End',
|
||||||
|
},
|
||||||
|
},
|
||||||
|
],
|
||||||
|
},
|
||||||
|
},
|
||||||
|
}),
|
||||||
|
{ status: 200, headers: { 'content-type': 'application/json' } },
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
if (body.query?.includes('characters(page: $page')) {
|
||||||
|
const mediaId = Number(body.variables?.id);
|
||||||
|
if (mediaId === 130298) {
|
||||||
|
return new Response(
|
||||||
|
JSON.stringify({
|
||||||
|
data: {
|
||||||
|
Media: {
|
||||||
|
title: { english: 'The Eminence in Shadow' },
|
||||||
|
characters: {
|
||||||
|
pageInfo: { hasNextPage: false },
|
||||||
|
edges: [
|
||||||
|
{
|
||||||
|
role: 'MAIN',
|
||||||
|
node: {
|
||||||
|
id: 111,
|
||||||
|
description: 'Leader of Shadow Garden.',
|
||||||
|
image: {
|
||||||
|
large: 'https://example.com/alpha.png',
|
||||||
|
medium: null,
|
||||||
|
},
|
||||||
|
name: {
|
||||||
|
full: 'Alpha',
|
||||||
|
native: 'アルファ',
|
||||||
|
},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
],
|
||||||
|
},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
}),
|
||||||
|
{ status: 200, headers: { 'content-type': 'application/json' } },
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
return new Response(
|
||||||
|
JSON.stringify({
|
||||||
|
data: {
|
||||||
|
Media: {
|
||||||
|
title: { english: 'Frieren: Beyond Journey’s End' },
|
||||||
|
characters: {
|
||||||
|
pageInfo: { hasNextPage: false },
|
||||||
|
edges: [
|
||||||
|
{
|
||||||
|
role: 'MAIN',
|
||||||
|
node: {
|
||||||
|
id: 222,
|
||||||
|
description: 'Elven mage.',
|
||||||
|
image: {
|
||||||
|
large: 'https://example.com/frieren.png',
|
||||||
|
medium: null,
|
||||||
|
},
|
||||||
|
name: {
|
||||||
|
full: 'Frieren',
|
||||||
|
native: 'フリーレン',
|
||||||
|
},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
],
|
||||||
|
},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
}),
|
||||||
|
{ status: 200, headers: { 'content-type': 'application/json' } },
|
||||||
|
);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if (url === 'https://example.com/alpha.png' || url === 'https://example.com/frieren.png') {
|
||||||
|
return new Response(PNG_1X1, {
|
||||||
|
status: 200,
|
||||||
|
headers: { 'content-type': 'image/png' },
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
throw new Error(`Unexpected fetch URL: ${url}`);
|
||||||
|
}) as typeof globalThis.fetch;
|
||||||
|
|
||||||
|
try {
|
||||||
|
const runtimeOpen = createCharacterDictionaryRuntimeService({
|
||||||
|
userDataPath,
|
||||||
|
getCurrentMediaPath: () => '/tmp/current.mkv',
|
||||||
|
getCurrentMediaTitle: () => current.title,
|
||||||
|
resolveMediaPathForJimaku: (mediaPath) => mediaPath,
|
||||||
|
guessAnilistMediaInfo: async () => ({
|
||||||
|
title: current.title,
|
||||||
|
episode: current.episode,
|
||||||
|
source: 'fallback',
|
||||||
|
}),
|
||||||
|
getCollapsibleSectionOpenState: () => true,
|
||||||
|
now: () => 1_700_000_000_100,
|
||||||
|
});
|
||||||
|
|
||||||
|
await runtimeOpen.getOrCreateCurrentSnapshot();
|
||||||
|
current.title = 'Frieren: Beyond Journey’s End';
|
||||||
|
current.episode = 1;
|
||||||
|
await runtimeOpen.getOrCreateCurrentSnapshot();
|
||||||
|
|
||||||
|
const runtimeClosed = createCharacterDictionaryRuntimeService({
|
||||||
|
userDataPath,
|
||||||
|
getCurrentMediaPath: () => '/tmp/current.mkv',
|
||||||
|
getCurrentMediaTitle: () => current.title,
|
||||||
|
resolveMediaPathForJimaku: (mediaPath) => mediaPath,
|
||||||
|
guessAnilistMediaInfo: async () => ({
|
||||||
|
title: current.title,
|
||||||
|
episode: current.episode,
|
||||||
|
source: 'fallback',
|
||||||
|
}),
|
||||||
|
getCollapsibleSectionOpenState: () => false,
|
||||||
|
now: () => 1_700_000_000_200,
|
||||||
|
});
|
||||||
|
|
||||||
|
const merged = await runtimeClosed.buildMergedDictionary([21, 130298]);
|
||||||
|
const termBank = JSON.parse(
|
||||||
|
readStoredZipEntry(merged.zipPath, 'term_bank_1.json').toString('utf8'),
|
||||||
|
) as Array<
|
||||||
|
[
|
||||||
|
string,
|
||||||
|
string,
|
||||||
|
string,
|
||||||
|
string,
|
||||||
|
number,
|
||||||
|
Array<string | Record<string, unknown>>,
|
||||||
|
number,
|
||||||
|
string,
|
||||||
|
]
|
||||||
|
>;
|
||||||
|
const alpha = termBank.find(([term]) => term === 'アルファ');
|
||||||
|
assert.ok(alpha);
|
||||||
|
const children = (
|
||||||
|
alpha[5][0] as {
|
||||||
|
content: { content: Array<Record<string, unknown>> };
|
||||||
|
}
|
||||||
|
).content.content;
|
||||||
|
const sections = children.filter(
|
||||||
|
(item) => (item as { tag?: string }).tag === 'details',
|
||||||
|
) as Array<{
|
||||||
|
open?: boolean;
|
||||||
|
}>;
|
||||||
|
assert.ok(sections.length >= 1);
|
||||||
|
assert.ok(sections.every((section) => section.open === false));
|
||||||
|
} finally {
|
||||||
|
globalThis.fetch = originalFetch;
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
test('generateForCurrentMedia paces AniList requests and character image downloads', async () => {
|
test('generateForCurrentMedia paces AniList requests and character image downloads', async () => {
|
||||||
const userDataPath = makeTempDir();
|
const userDataPath = makeTempDir();
|
||||||
const originalFetch = globalThis.fetch;
|
const originalFetch = globalThis.fetch;
|
||||||
|
|||||||
@@ -3,6 +3,7 @@ import * as os from 'os';
|
|||||||
import * as path from 'path';
|
import * as path from 'path';
|
||||||
import { createHash } from 'node:crypto';
|
import { createHash } from 'node:crypto';
|
||||||
import type { AnilistMediaGuess } from '../core/services/anilist/anilist-updater';
|
import type { AnilistMediaGuess } from '../core/services/anilist/anilist-updater';
|
||||||
|
import type { AnilistCharacterDictionaryCollapsibleSectionKey } from '../types';
|
||||||
import { hasVideoExtension } from '../shared/video-extensions';
|
import { hasVideoExtension } from '../shared/video-extensions';
|
||||||
|
|
||||||
const ANILIST_GRAPHQL_URL = 'https://graphql.anilist.co';
|
const ANILIST_GRAPHQL_URL = 'https://graphql.anilist.co';
|
||||||
@@ -54,7 +55,7 @@ export type CharacterDictionarySnapshot = {
|
|||||||
images: CharacterDictionarySnapshotImage[];
|
images: CharacterDictionarySnapshotImage[];
|
||||||
};
|
};
|
||||||
|
|
||||||
const CHARACTER_DICTIONARY_FORMAT_VERSION = 12;
|
const CHARACTER_DICTIONARY_FORMAT_VERSION = 14;
|
||||||
const CHARACTER_DICTIONARY_MERGED_TITLE = 'SubMiner Character Dictionary';
|
const CHARACTER_DICTIONARY_MERGED_TITLE = 'SubMiner Character Dictionary';
|
||||||
|
|
||||||
type AniListSearchResponse = {
|
type AniListSearchResponse = {
|
||||||
@@ -105,6 +106,7 @@ type AniListCharacterPageResponse = {
|
|||||||
name?: {
|
name?: {
|
||||||
full?: string | null;
|
full?: string | null;
|
||||||
native?: string | null;
|
native?: string | null;
|
||||||
|
alternative?: Array<string | null> | null;
|
||||||
} | null;
|
} | null;
|
||||||
} | null;
|
} | null;
|
||||||
} | null>;
|
} | null>;
|
||||||
@@ -124,6 +126,7 @@ type CharacterRecord = {
|
|||||||
role: CharacterDictionaryRole;
|
role: CharacterDictionaryRole;
|
||||||
fullName: string;
|
fullName: string;
|
||||||
nativeName: string;
|
nativeName: string;
|
||||||
|
alternativeNames: string[];
|
||||||
description: string;
|
description: string;
|
||||||
imageUrl: string | null;
|
imageUrl: string | null;
|
||||||
voiceActors: VoiceActorRecord[];
|
voiceActors: VoiceActorRecord[];
|
||||||
@@ -178,6 +181,9 @@ export interface CharacterDictionaryRuntimeDeps {
|
|||||||
sleep?: (ms: number) => Promise<void>;
|
sleep?: (ms: number) => Promise<void>;
|
||||||
logInfo?: (message: string) => void;
|
logInfo?: (message: string) => void;
|
||||||
logWarn?: (message: string) => void;
|
logWarn?: (message: string) => void;
|
||||||
|
getCollapsibleSectionOpenState?: (
|
||||||
|
section: AnilistCharacterDictionaryCollapsibleSectionKey,
|
||||||
|
) => boolean;
|
||||||
}
|
}
|
||||||
|
|
||||||
type ResolvedAniListMedia = {
|
type ResolvedAniListMedia = {
|
||||||
@@ -423,6 +429,7 @@ const ROMANIZED_KANA_MONOGRAPHS: ReadonlyArray<[string, string]> = [
|
|||||||
['re', 'レ'],
|
['re', 'レ'],
|
||||||
['ro', 'ロ'],
|
['ro', 'ロ'],
|
||||||
['wa', 'ワ'],
|
['wa', 'ワ'],
|
||||||
|
['w', 'ウ'],
|
||||||
['wo', 'ヲ'],
|
['wo', 'ヲ'],
|
||||||
['n', 'ン'],
|
['n', 'ン'],
|
||||||
];
|
];
|
||||||
@@ -490,12 +497,34 @@ function addRomanizedKanaAliases(values: Iterable<string>): string[] {
|
|||||||
return [...aliases];
|
return [...aliases];
|
||||||
}
|
}
|
||||||
|
|
||||||
|
function expandRawNameVariants(rawName: string): string[] {
|
||||||
|
const trimmed = rawName.trim();
|
||||||
|
if (!trimmed) return [];
|
||||||
|
|
||||||
|
const variants = new Set<string>([trimmed]);
|
||||||
|
const outer = trimmed
|
||||||
|
.replace(/[((][^()()]+[))]/g, ' ')
|
||||||
|
.replace(/\s+/g, ' ')
|
||||||
|
.trim();
|
||||||
|
if (outer && outer !== trimmed) {
|
||||||
|
variants.add(outer);
|
||||||
|
}
|
||||||
|
|
||||||
|
for (const match of trimmed.matchAll(/[((]([^()()]+)[))]/g)) {
|
||||||
|
const inner = match[1]?.trim() || '';
|
||||||
|
if (inner) {
|
||||||
|
variants.add(inner);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return [...variants];
|
||||||
|
}
|
||||||
|
|
||||||
function buildNameTerms(character: CharacterRecord): string[] {
|
function buildNameTerms(character: CharacterRecord): string[] {
|
||||||
const base = new Set<string>();
|
const base = new Set<string>();
|
||||||
const rawNames = [character.nativeName, character.fullName];
|
const rawNames = [character.nativeName, character.fullName, ...character.alternativeNames];
|
||||||
for (const rawName of rawNames) {
|
for (const rawName of rawNames) {
|
||||||
const name = rawName.trim();
|
for (const name of expandRawNameVariants(rawName)) {
|
||||||
if (!name) continue;
|
|
||||||
base.add(name);
|
base.add(name);
|
||||||
|
|
||||||
const compact = name.replace(/[\s\u3000]+/g, '');
|
const compact = name.replace(/[\s\u3000]+/g, '');
|
||||||
@@ -524,6 +553,7 @@ function buildNameTerms(character: CharacterRecord): string[] {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
}
|
||||||
|
|
||||||
const withHonorifics = new Set<string>();
|
const withHonorifics = new Set<string>();
|
||||||
for (const entry of base) {
|
for (const entry of base) {
|
||||||
@@ -758,11 +788,12 @@ function roleBadgeStyle(role: CharacterDictionaryRole): Record<string, string> {
|
|||||||
|
|
||||||
function buildCollapsibleSection(
|
function buildCollapsibleSection(
|
||||||
title: string,
|
title: string,
|
||||||
|
open: boolean,
|
||||||
body: Array<string | Record<string, unknown>> | string | Record<string, unknown>,
|
body: Array<string | Record<string, unknown>> | string | Record<string, unknown>,
|
||||||
): Record<string, unknown> {
|
): Record<string, unknown> {
|
||||||
return {
|
return {
|
||||||
tag: 'details',
|
tag: 'details',
|
||||||
open: true,
|
open,
|
||||||
style: { marginTop: '0.4em' },
|
style: { marginTop: '0.4em' },
|
||||||
content: [
|
content: [
|
||||||
{
|
{
|
||||||
@@ -849,6 +880,9 @@ function createDefinitionGlossary(
|
|||||||
mediaTitle: string,
|
mediaTitle: string,
|
||||||
imagePath: string | null,
|
imagePath: string | null,
|
||||||
vaImagePaths: Map<number, string>,
|
vaImagePaths: Map<number, string>,
|
||||||
|
getCollapsibleSectionOpenState: (
|
||||||
|
section: AnilistCharacterDictionaryCollapsibleSectionKey,
|
||||||
|
) => boolean,
|
||||||
): CharacterDictionaryGlossaryEntry[] {
|
): CharacterDictionaryGlossaryEntry[] {
|
||||||
const displayName = character.nativeName || character.fullName || `Character ${character.id}`;
|
const displayName = character.nativeName || character.fullName || `Character ${character.id}`;
|
||||||
const secondaryName =
|
const secondaryName =
|
||||||
@@ -910,7 +944,13 @@ function createDefinitionGlossary(
|
|||||||
});
|
});
|
||||||
|
|
||||||
if (descriptionText) {
|
if (descriptionText) {
|
||||||
content.push(buildCollapsibleSection('Description', descriptionText));
|
content.push(
|
||||||
|
buildCollapsibleSection(
|
||||||
|
'Description',
|
||||||
|
getCollapsibleSectionOpenState('description'),
|
||||||
|
descriptionText,
|
||||||
|
),
|
||||||
|
);
|
||||||
}
|
}
|
||||||
|
|
||||||
if (fields.length > 0) {
|
if (fields.length > 0) {
|
||||||
@@ -919,11 +959,15 @@ function createDefinitionGlossary(
|
|||||||
content: `${f.key}: ${f.value}`,
|
content: `${f.key}: ${f.value}`,
|
||||||
}));
|
}));
|
||||||
content.push(
|
content.push(
|
||||||
buildCollapsibleSection('Character Information', {
|
buildCollapsibleSection(
|
||||||
|
'Character Information',
|
||||||
|
getCollapsibleSectionOpenState('characterInformation'),
|
||||||
|
{
|
||||||
tag: 'ul',
|
tag: 'ul',
|
||||||
style: { marginTop: '0.15em' },
|
style: { marginTop: '0.15em' },
|
||||||
content: fieldItems,
|
content: fieldItems,
|
||||||
}),
|
},
|
||||||
|
),
|
||||||
);
|
);
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -931,6 +975,7 @@ function createDefinitionGlossary(
|
|||||||
content.push(
|
content.push(
|
||||||
buildCollapsibleSection(
|
buildCollapsibleSection(
|
||||||
'Voiced by',
|
'Voiced by',
|
||||||
|
getCollapsibleSectionOpenState('voicedBy'),
|
||||||
buildVoicedByContent(character.voiceActors, vaImagePaths),
|
buildVoicedByContent(character.voiceActors, vaImagePaths),
|
||||||
),
|
),
|
||||||
);
|
);
|
||||||
@@ -1210,6 +1255,7 @@ async function fetchCharactersForMedia(
|
|||||||
name {
|
name {
|
||||||
full
|
full
|
||||||
native
|
native
|
||||||
|
alternative
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@@ -1243,7 +1289,15 @@ async function fetchCharactersForMedia(
|
|||||||
if (!node || typeof node.id !== 'number') continue;
|
if (!node || typeof node.id !== 'number') continue;
|
||||||
const fullName = node.name?.full?.trim() || '';
|
const fullName = node.name?.full?.trim() || '';
|
||||||
const nativeName = node.name?.native?.trim() || '';
|
const nativeName = node.name?.native?.trim() || '';
|
||||||
if (!fullName && !nativeName) continue;
|
const alternativeNames = [
|
||||||
|
...new Set(
|
||||||
|
(node.name?.alternative ?? [])
|
||||||
|
.filter((value): value is string => typeof value === 'string')
|
||||||
|
.map((value) => value.trim())
|
||||||
|
.filter((value) => value.length > 0),
|
||||||
|
),
|
||||||
|
];
|
||||||
|
if (!fullName && !nativeName && alternativeNames.length === 0) continue;
|
||||||
const voiceActors: VoiceActorRecord[] = [];
|
const voiceActors: VoiceActorRecord[] = [];
|
||||||
for (const va of edge?.voiceActors ?? []) {
|
for (const va of edge?.voiceActors ?? []) {
|
||||||
if (!va || typeof va.id !== 'number') continue;
|
if (!va || typeof va.id !== 'number') continue;
|
||||||
@@ -1262,6 +1316,7 @@ async function fetchCharactersForMedia(
|
|||||||
role: mapRole(edge?.role),
|
role: mapRole(edge?.role),
|
||||||
fullName,
|
fullName,
|
||||||
nativeName,
|
nativeName,
|
||||||
|
alternativeNames,
|
||||||
description: node.description || '',
|
description: node.description || '',
|
||||||
imageUrl: node.image?.large || node.image?.medium || null,
|
imageUrl: node.image?.large || node.image?.medium || null,
|
||||||
voiceActors,
|
voiceActors,
|
||||||
@@ -1340,6 +1395,9 @@ function buildSnapshotFromCharacters(
|
|||||||
imagesByCharacterId: Map<number, CharacterDictionarySnapshotImage>,
|
imagesByCharacterId: Map<number, CharacterDictionarySnapshotImage>,
|
||||||
imagesByVaId: Map<number, CharacterDictionarySnapshotImage>,
|
imagesByVaId: Map<number, CharacterDictionarySnapshotImage>,
|
||||||
updatedAt: number,
|
updatedAt: number,
|
||||||
|
getCollapsibleSectionOpenState: (
|
||||||
|
section: AnilistCharacterDictionaryCollapsibleSectionKey,
|
||||||
|
) => boolean,
|
||||||
): CharacterDictionarySnapshot {
|
): CharacterDictionarySnapshot {
|
||||||
const termEntries: CharacterDictionaryTermEntry[] = [];
|
const termEntries: CharacterDictionaryTermEntry[] = [];
|
||||||
const seen = new Set<string>();
|
const seen = new Set<string>();
|
||||||
@@ -1351,7 +1409,13 @@ function buildSnapshotFromCharacters(
|
|||||||
const vaImg = imagesByVaId.get(va.id);
|
const vaImg = imagesByVaId.get(va.id);
|
||||||
if (vaImg) vaImagePaths.set(va.id, vaImg.path);
|
if (vaImg) vaImagePaths.set(va.id, vaImg.path);
|
||||||
}
|
}
|
||||||
const glossary = createDefinitionGlossary(character, mediaTitle, imagePath, vaImagePaths);
|
const glossary = createDefinitionGlossary(
|
||||||
|
character,
|
||||||
|
mediaTitle,
|
||||||
|
imagePath,
|
||||||
|
vaImagePaths,
|
||||||
|
getCollapsibleSectionOpenState,
|
||||||
|
);
|
||||||
const candidateTerms = buildNameTerms(character);
|
const candidateTerms = buildNameTerms(character);
|
||||||
for (const term of candidateTerms) {
|
for (const term of candidateTerms) {
|
||||||
const reading = buildReading(term);
|
const reading = buildReading(term);
|
||||||
@@ -1377,6 +1441,67 @@ function buildSnapshotFromCharacters(
|
|||||||
};
|
};
|
||||||
}
|
}
|
||||||
|
|
||||||
|
function getCollapsibleSectionKeyFromTitle(
|
||||||
|
title: string,
|
||||||
|
): AnilistCharacterDictionaryCollapsibleSectionKey | null {
|
||||||
|
if (title === 'Description') return 'description';
|
||||||
|
if (title === 'Character Information') return 'characterInformation';
|
||||||
|
if (title === 'Voiced by') return 'voicedBy';
|
||||||
|
return null;
|
||||||
|
}
|
||||||
|
|
||||||
|
function applyCollapsibleOpenStatesToStructuredValue(
|
||||||
|
value: unknown,
|
||||||
|
getCollapsibleSectionOpenState: (
|
||||||
|
section: AnilistCharacterDictionaryCollapsibleSectionKey,
|
||||||
|
) => boolean,
|
||||||
|
): unknown {
|
||||||
|
if (Array.isArray(value)) {
|
||||||
|
return value.map((item) =>
|
||||||
|
applyCollapsibleOpenStatesToStructuredValue(item, getCollapsibleSectionOpenState),
|
||||||
|
);
|
||||||
|
}
|
||||||
|
if (!value || typeof value !== 'object') {
|
||||||
|
return value;
|
||||||
|
}
|
||||||
|
|
||||||
|
const record = value as Record<string, unknown>;
|
||||||
|
const next: Record<string, unknown> = {};
|
||||||
|
for (const [key, child] of Object.entries(record)) {
|
||||||
|
next[key] = applyCollapsibleOpenStatesToStructuredValue(child, getCollapsibleSectionOpenState);
|
||||||
|
}
|
||||||
|
|
||||||
|
if (record.tag === 'details') {
|
||||||
|
const content = Array.isArray(record.content) ? record.content : [];
|
||||||
|
const summary = content[0];
|
||||||
|
if (summary && typeof summary === 'object' && !Array.isArray(summary)) {
|
||||||
|
const summaryContent = (summary as Record<string, unknown>).content;
|
||||||
|
if (typeof summaryContent === 'string') {
|
||||||
|
const section = getCollapsibleSectionKeyFromTitle(summaryContent);
|
||||||
|
if (section) {
|
||||||
|
next.open = getCollapsibleSectionOpenState(section);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return next;
|
||||||
|
}
|
||||||
|
|
||||||
|
function applyCollapsibleOpenStatesToTermEntries(
|
||||||
|
termEntries: CharacterDictionaryTermEntry[],
|
||||||
|
getCollapsibleSectionOpenState: (
|
||||||
|
section: AnilistCharacterDictionaryCollapsibleSectionKey,
|
||||||
|
) => boolean,
|
||||||
|
): CharacterDictionaryTermEntry[] {
|
||||||
|
return termEntries.map((entry) => {
|
||||||
|
const glossary = entry[5].map((item) =>
|
||||||
|
applyCollapsibleOpenStatesToStructuredValue(item, getCollapsibleSectionOpenState),
|
||||||
|
) as CharacterDictionaryGlossaryEntry[];
|
||||||
|
return [...entry.slice(0, 5), glossary, ...entry.slice(6)] as CharacterDictionaryTermEntry;
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
function buildDictionaryZip(
|
function buildDictionaryZip(
|
||||||
outputPath: string,
|
outputPath: string,
|
||||||
dictionaryTitle: string,
|
dictionaryTitle: string,
|
||||||
@@ -1444,6 +1569,7 @@ export function createCharacterDictionaryRuntimeService(deps: CharacterDictionar
|
|||||||
} {
|
} {
|
||||||
const outputDir = path.join(deps.userDataPath, 'character-dictionaries');
|
const outputDir = path.join(deps.userDataPath, 'character-dictionaries');
|
||||||
const sleepMs = deps.sleep ?? sleep;
|
const sleepMs = deps.sleep ?? sleep;
|
||||||
|
const getCollapsibleSectionOpenState = deps.getCollapsibleSectionOpenState ?? (() => false);
|
||||||
|
|
||||||
const resolveCurrentMedia = async (
|
const resolveCurrentMedia = async (
|
||||||
targetPath?: string,
|
targetPath?: string,
|
||||||
@@ -1557,6 +1683,7 @@ export function createCharacterDictionaryRuntimeService(deps: CharacterDictionar
|
|||||||
imagesByCharacterId,
|
imagesByCharacterId,
|
||||||
imagesByVaId,
|
imagesByVaId,
|
||||||
deps.now(),
|
deps.now(),
|
||||||
|
getCollapsibleSectionOpenState,
|
||||||
);
|
);
|
||||||
writeSnapshot(snapshotPath, snapshot);
|
writeSnapshot(snapshotPath, snapshot);
|
||||||
deps.logInfo?.(
|
deps.logInfo?.(
|
||||||
@@ -1589,7 +1716,10 @@ export function createCharacterDictionaryRuntimeService(deps: CharacterDictionar
|
|||||||
const normalizedMediaIds = mediaIds
|
const normalizedMediaIds = mediaIds
|
||||||
.filter((mediaId) => Number.isFinite(mediaId) && mediaId > 0)
|
.filter((mediaId) => Number.isFinite(mediaId) && mediaId > 0)
|
||||||
.map((mediaId) => Math.floor(mediaId));
|
.map((mediaId) => Math.floor(mediaId));
|
||||||
const snapshots = normalizedMediaIds.map((mediaId) => {
|
const snapshotResults = await Promise.all(
|
||||||
|
normalizedMediaIds.map((mediaId) => getOrCreateSnapshot(mediaId)),
|
||||||
|
);
|
||||||
|
const snapshots = snapshotResults.map(({ mediaId }) => {
|
||||||
const snapshot = readSnapshot(getSnapshotPath(outputDir, mediaId));
|
const snapshot = readSnapshot(getSnapshotPath(outputDir, mediaId));
|
||||||
if (!snapshot) {
|
if (!snapshot) {
|
||||||
throw new Error(`Missing character dictionary snapshot for AniList ${mediaId}.`);
|
throw new Error(`Missing character dictionary snapshot for AniList ${mediaId}.`);
|
||||||
@@ -1606,7 +1736,10 @@ export function createCharacterDictionaryRuntimeService(deps: CharacterDictionar
|
|||||||
CHARACTER_DICTIONARY_MERGED_TITLE,
|
CHARACTER_DICTIONARY_MERGED_TITLE,
|
||||||
description,
|
description,
|
||||||
revision,
|
revision,
|
||||||
|
applyCollapsibleOpenStatesToTermEntries(
|
||||||
snapshots.flatMap((snapshot) => snapshot.termEntries),
|
snapshots.flatMap((snapshot) => snapshot.termEntries),
|
||||||
|
getCollapsibleSectionOpenState,
|
||||||
|
),
|
||||||
snapshots.flatMap((snapshot) => snapshot.images),
|
snapshots.flatMap((snapshot) => snapshot.images),
|
||||||
);
|
);
|
||||||
deps.logInfo?.(
|
deps.logInfo?.(
|
||||||
@@ -1651,7 +1784,10 @@ export function createCharacterDictionaryRuntimeService(deps: CharacterDictionar
|
|||||||
dictionaryTitle,
|
dictionaryTitle,
|
||||||
description,
|
description,
|
||||||
revision,
|
revision,
|
||||||
|
applyCollapsibleOpenStatesToTermEntries(
|
||||||
storedSnapshot.termEntries,
|
storedSnapshot.termEntries,
|
||||||
|
getCollapsibleSectionOpenState,
|
||||||
|
),
|
||||||
storedSnapshot.images,
|
storedSnapshot.images,
|
||||||
);
|
);
|
||||||
deps.logInfo?.(
|
deps.logInfo?.(
|
||||||
|
|||||||
@@ -18,6 +18,7 @@ export interface CliCommandRuntimeServiceContext {
|
|||||||
isOverlayInitialized: () => boolean;
|
isOverlayInitialized: () => boolean;
|
||||||
initializeOverlay: () => void;
|
initializeOverlay: () => void;
|
||||||
toggleVisibleOverlay: () => void;
|
toggleVisibleOverlay: () => void;
|
||||||
|
openFirstRunSetup: () => void;
|
||||||
setVisibleOverlay: (visible: boolean) => void;
|
setVisibleOverlay: (visible: boolean) => void;
|
||||||
copyCurrentSubtitle: () => void;
|
copyCurrentSubtitle: () => void;
|
||||||
startPendingMultiCopy: (timeoutMs: number) => void;
|
startPendingMultiCopy: (timeoutMs: number) => void;
|
||||||
@@ -103,6 +104,7 @@ function createCliCommandDepsFromContext(
|
|||||||
runCommand: context.runJellyfinCommand,
|
runCommand: context.runJellyfinCommand,
|
||||||
},
|
},
|
||||||
ui: {
|
ui: {
|
||||||
|
openFirstRunSetup: context.openFirstRunSetup,
|
||||||
openYomitanSettings: context.openYomitanSettings,
|
openYomitanSettings: context.openYomitanSettings,
|
||||||
cycleSecondarySubMode: context.cycleSecondarySubMode,
|
cycleSecondarySubMode: context.cycleSecondarySubMode,
|
||||||
openRuntimeOptionsPalette: context.openRuntimeOptionsPalette,
|
openRuntimeOptionsPalette: context.openRuntimeOptionsPalette,
|
||||||
|
|||||||
@@ -159,6 +159,7 @@ export interface CliCommandRuntimeServiceDepsParams {
|
|||||||
runCommand: CliCommandDepsRuntimeOptions['jellyfin']['runCommand'];
|
runCommand: CliCommandDepsRuntimeOptions['jellyfin']['runCommand'];
|
||||||
};
|
};
|
||||||
ui: {
|
ui: {
|
||||||
|
openFirstRunSetup: CliCommandDepsRuntimeOptions['ui']['openFirstRunSetup'];
|
||||||
openYomitanSettings: CliCommandDepsRuntimeOptions['ui']['openYomitanSettings'];
|
openYomitanSettings: CliCommandDepsRuntimeOptions['ui']['openYomitanSettings'];
|
||||||
cycleSecondarySubMode: CliCommandDepsRuntimeOptions['ui']['cycleSecondarySubMode'];
|
cycleSecondarySubMode: CliCommandDepsRuntimeOptions['ui']['cycleSecondarySubMode'];
|
||||||
openRuntimeOptionsPalette: CliCommandDepsRuntimeOptions['ui']['openRuntimeOptionsPalette'];
|
openRuntimeOptionsPalette: CliCommandDepsRuntimeOptions['ui']['openRuntimeOptionsPalette'];
|
||||||
@@ -185,6 +186,7 @@ export interface MpvCommandRuntimeServiceDepsParams {
|
|||||||
mpvPlayNextSubtitle: HandleMpvCommandFromIpcOptions['mpvPlayNextSubtitle'];
|
mpvPlayNextSubtitle: HandleMpvCommandFromIpcOptions['mpvPlayNextSubtitle'];
|
||||||
shiftSubDelayToAdjacentSubtitle: HandleMpvCommandFromIpcOptions['shiftSubDelayToAdjacentSubtitle'];
|
shiftSubDelayToAdjacentSubtitle: HandleMpvCommandFromIpcOptions['shiftSubDelayToAdjacentSubtitle'];
|
||||||
mpvSendCommand: HandleMpvCommandFromIpcOptions['mpvSendCommand'];
|
mpvSendCommand: HandleMpvCommandFromIpcOptions['mpvSendCommand'];
|
||||||
|
resolveProxyCommandOsd?: HandleMpvCommandFromIpcOptions['resolveProxyCommandOsd'];
|
||||||
isMpvConnected: HandleMpvCommandFromIpcOptions['isMpvConnected'];
|
isMpvConnected: HandleMpvCommandFromIpcOptions['isMpvConnected'];
|
||||||
hasRuntimeOptionsManager: HandleMpvCommandFromIpcOptions['hasRuntimeOptionsManager'];
|
hasRuntimeOptionsManager: HandleMpvCommandFromIpcOptions['hasRuntimeOptionsManager'];
|
||||||
}
|
}
|
||||||
@@ -307,6 +309,7 @@ export function createCliCommandRuntimeServiceDeps(
|
|||||||
runCommand: params.jellyfin.runCommand,
|
runCommand: params.jellyfin.runCommand,
|
||||||
},
|
},
|
||||||
ui: {
|
ui: {
|
||||||
|
openFirstRunSetup: params.ui.openFirstRunSetup,
|
||||||
openYomitanSettings: params.ui.openYomitanSettings,
|
openYomitanSettings: params.ui.openYomitanSettings,
|
||||||
cycleSecondarySubMode: params.ui.cycleSecondarySubMode,
|
cycleSecondarySubMode: params.ui.cycleSecondarySubMode,
|
||||||
openRuntimeOptionsPalette: params.ui.openRuntimeOptionsPalette,
|
openRuntimeOptionsPalette: params.ui.openRuntimeOptionsPalette,
|
||||||
@@ -337,6 +340,7 @@ export function createMpvCommandRuntimeServiceDeps(
|
|||||||
mpvPlayNextSubtitle: params.mpvPlayNextSubtitle,
|
mpvPlayNextSubtitle: params.mpvPlayNextSubtitle,
|
||||||
shiftSubDelayToAdjacentSubtitle: params.shiftSubDelayToAdjacentSubtitle,
|
shiftSubDelayToAdjacentSubtitle: params.shiftSubDelayToAdjacentSubtitle,
|
||||||
mpvSendCommand: params.mpvSendCommand,
|
mpvSendCommand: params.mpvSendCommand,
|
||||||
|
resolveProxyCommandOsd: params.resolveProxyCommandOsd,
|
||||||
isMpvConnected: params.isMpvConnected,
|
isMpvConnected: params.isMpvConnected,
|
||||||
hasRuntimeOptionsManager: params.hasRuntimeOptionsManager,
|
hasRuntimeOptionsManager: params.hasRuntimeOptionsManager,
|
||||||
};
|
};
|
||||||
|
|||||||
@@ -2,6 +2,12 @@ import type { RuntimeOptionApplyResult, RuntimeOptionId } from '../types';
|
|||||||
import { handleMpvCommandFromIpc } from '../core/services';
|
import { handleMpvCommandFromIpc } from '../core/services';
|
||||||
import { createMpvCommandRuntimeServiceDeps } from './dependencies';
|
import { createMpvCommandRuntimeServiceDeps } from './dependencies';
|
||||||
import { SPECIAL_COMMANDS } from '../config';
|
import { SPECIAL_COMMANDS } from '../config';
|
||||||
|
import { resolveProxyCommandOsdRuntime } from './runtime/mpv-proxy-osd';
|
||||||
|
|
||||||
|
type MpvPropertyClientLike = {
|
||||||
|
connected: boolean;
|
||||||
|
requestProperty: (name: string) => Promise<unknown>;
|
||||||
|
};
|
||||||
|
|
||||||
export interface MpvCommandFromIpcRuntimeDeps {
|
export interface MpvCommandFromIpcRuntimeDeps {
|
||||||
triggerSubsyncFromConfig: () => void;
|
triggerSubsyncFromConfig: () => void;
|
||||||
@@ -12,6 +18,7 @@ export interface MpvCommandFromIpcRuntimeDeps {
|
|||||||
playNextSubtitle: () => void;
|
playNextSubtitle: () => void;
|
||||||
shiftSubDelayToAdjacentSubtitle: (direction: 'next' | 'previous') => Promise<void>;
|
shiftSubDelayToAdjacentSubtitle: (direction: 'next' | 'previous') => Promise<void>;
|
||||||
sendMpvCommand: (command: (string | number)[]) => void;
|
sendMpvCommand: (command: (string | number)[]) => void;
|
||||||
|
getMpvClient: () => MpvPropertyClientLike | null;
|
||||||
isMpvConnected: () => boolean;
|
isMpvConnected: () => boolean;
|
||||||
hasRuntimeOptionsManager: () => boolean;
|
hasRuntimeOptionsManager: () => boolean;
|
||||||
}
|
}
|
||||||
@@ -33,6 +40,8 @@ export function handleMpvCommandFromIpcRuntime(
|
|||||||
shiftSubDelayToAdjacentSubtitle: (direction) =>
|
shiftSubDelayToAdjacentSubtitle: (direction) =>
|
||||||
deps.shiftSubDelayToAdjacentSubtitle(direction),
|
deps.shiftSubDelayToAdjacentSubtitle(direction),
|
||||||
mpvSendCommand: deps.sendMpvCommand,
|
mpvSendCommand: deps.sendMpvCommand,
|
||||||
|
resolveProxyCommandOsd: (nextCommand) =>
|
||||||
|
resolveProxyCommandOsdRuntime(nextCommand, deps.getMpvClient),
|
||||||
isMpvConnected: deps.isMpvConnected,
|
isMpvConnected: deps.isMpvConnected,
|
||||||
hasRuntimeOptionsManager: deps.hasRuntimeOptionsManager,
|
hasRuntimeOptionsManager: deps.hasRuntimeOptionsManager,
|
||||||
}),
|
}),
|
||||||
|
|||||||
@@ -76,7 +76,7 @@ test('register subminer protocol client main deps builder maps callbacks', () =>
|
|||||||
execPath: '/tmp/electron',
|
execPath: '/tmp/electron',
|
||||||
resolvePath: (value) => `/abs/${value}`,
|
resolvePath: (value) => `/abs/${value}`,
|
||||||
setAsDefaultProtocolClient: () => true,
|
setAsDefaultProtocolClient: () => true,
|
||||||
logWarn: (message) => calls.push(`warn:${message}`),
|
logDebug: (message) => calls.push(`debug:${message}`),
|
||||||
})();
|
})();
|
||||||
|
|
||||||
assert.equal(deps.isDefaultApp(), true);
|
assert.equal(deps.isDefaultApp(), true);
|
||||||
|
|||||||
@@ -60,6 +60,6 @@ export function createBuildRegisterSubminerProtocolClientMainDepsHandler(
|
|||||||
resolvePath: (value: string) => deps.resolvePath(value),
|
resolvePath: (value: string) => deps.resolvePath(value),
|
||||||
setAsDefaultProtocolClient: (scheme: string, path?: string, args?: string[]) =>
|
setAsDefaultProtocolClient: (scheme: string, path?: string, args?: string[]) =>
|
||||||
deps.setAsDefaultProtocolClient(scheme, path, args),
|
deps.setAsDefaultProtocolClient(scheme, path, args),
|
||||||
logWarn: (message: string, details?: unknown) => deps.logWarn(message, details),
|
logDebug: (message: string, details?: unknown) => deps.logDebug(message, details),
|
||||||
});
|
});
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -56,9 +56,26 @@ test('createRegisterSubminerProtocolClientHandler registers default app entry',
|
|||||||
calls.push(`register:${String(args?.[0])}`);
|
calls.push(`register:${String(args?.[0])}`);
|
||||||
return true;
|
return true;
|
||||||
},
|
},
|
||||||
logWarn: (message) => calls.push(`warn:${message}`),
|
logDebug: (message) => calls.push(`debug:${message}`),
|
||||||
});
|
});
|
||||||
|
|
||||||
register();
|
register();
|
||||||
assert.deepEqual(calls, ['register:/resolved/./entry.js']);
|
assert.deepEqual(calls, ['register:/resolved/./entry.js']);
|
||||||
});
|
});
|
||||||
|
|
||||||
|
test('createRegisterSubminerProtocolClientHandler keeps unsupported registration at debug level', () => {
|
||||||
|
const calls: string[] = [];
|
||||||
|
const register = createRegisterSubminerProtocolClientHandler({
|
||||||
|
isDefaultApp: () => false,
|
||||||
|
getArgv: () => ['SubMiner.AppImage'],
|
||||||
|
execPath: '/tmp/SubMiner.AppImage',
|
||||||
|
resolvePath: (value) => value,
|
||||||
|
setAsDefaultProtocolClient: () => false,
|
||||||
|
logDebug: (message) => calls.push(`debug:${message}`),
|
||||||
|
});
|
||||||
|
|
||||||
|
register();
|
||||||
|
assert.deepEqual(calls, [
|
||||||
|
'debug:Failed to register default protocol handler for subminer:// URLs',
|
||||||
|
]);
|
||||||
|
});
|
||||||
|
|||||||
@@ -67,7 +67,7 @@ export function createRegisterSubminerProtocolClientHandler(deps: {
|
|||||||
execPath: string;
|
execPath: string;
|
||||||
resolvePath: (value: string) => string;
|
resolvePath: (value: string) => string;
|
||||||
setAsDefaultProtocolClient: (scheme: string, path?: string, args?: string[]) => boolean;
|
setAsDefaultProtocolClient: (scheme: string, path?: string, args?: string[]) => boolean;
|
||||||
logWarn: (message: string, details?: unknown) => void;
|
logDebug: (message: string, details?: unknown) => void;
|
||||||
}) {
|
}) {
|
||||||
return (): void => {
|
return (): void => {
|
||||||
try {
|
try {
|
||||||
@@ -78,10 +78,10 @@ export function createRegisterSubminerProtocolClientHandler(deps: {
|
|||||||
])
|
])
|
||||||
: deps.setAsDefaultProtocolClient('subminer');
|
: deps.setAsDefaultProtocolClient('subminer');
|
||||||
if (!success) {
|
if (!success) {
|
||||||
deps.logWarn('Failed to register default protocol handler for subminer:// URLs');
|
deps.logDebug('Failed to register default protocol handler for subminer:// URLs');
|
||||||
}
|
}
|
||||||
} catch (error) {
|
} catch (error) {
|
||||||
deps.logWarn('Failed to register subminer:// protocol handler', error);
|
deps.logDebug('Failed to register subminer:// protocol handler', error);
|
||||||
}
|
}
|
||||||
};
|
};
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -5,6 +5,7 @@ import { createBuildAppReadyRuntimeMainDepsHandler } from './app-ready-main-deps
|
|||||||
test('app-ready main deps builder returns mapped app-ready runtime deps', async () => {
|
test('app-ready main deps builder returns mapped app-ready runtime deps', async () => {
|
||||||
const calls: string[] = [];
|
const calls: string[] = [];
|
||||||
const onReady = createBuildAppReadyRuntimeMainDepsHandler({
|
const onReady = createBuildAppReadyRuntimeMainDepsHandler({
|
||||||
|
ensureDefaultConfigBootstrap: () => calls.push('bootstrap-config'),
|
||||||
loadSubtitlePosition: () => calls.push('load-subtitle-position'),
|
loadSubtitlePosition: () => calls.push('load-subtitle-position'),
|
||||||
resolveKeybindings: () => calls.push('resolve-keybindings'),
|
resolveKeybindings: () => calls.push('resolve-keybindings'),
|
||||||
createMpvClient: () => calls.push('create-mpv-client'),
|
createMpvClient: () => calls.push('create-mpv-client'),
|
||||||
@@ -16,8 +17,12 @@ test('app-ready main deps builder returns mapped app-ready runtime deps', async
|
|||||||
setSecondarySubMode: () => calls.push('set-secondary-sub-mode'),
|
setSecondarySubMode: () => calls.push('set-secondary-sub-mode'),
|
||||||
defaultSecondarySubMode: 'hover',
|
defaultSecondarySubMode: 'hover',
|
||||||
defaultWebsocketPort: 5174,
|
defaultWebsocketPort: 5174,
|
||||||
|
defaultAnnotationWebsocketPort: 6678,
|
||||||
|
defaultTexthookerPort: 5174,
|
||||||
hasMpvWebsocketPlugin: () => false,
|
hasMpvWebsocketPlugin: () => false,
|
||||||
startSubtitleWebsocket: () => calls.push('start-ws'),
|
startSubtitleWebsocket: () => calls.push('start-ws'),
|
||||||
|
startAnnotationWebsocket: () => calls.push('start-annotation-ws'),
|
||||||
|
startTexthooker: () => calls.push('start-texthooker'),
|
||||||
log: () => calls.push('log'),
|
log: () => calls.push('log'),
|
||||||
setLogLevel: () => calls.push('set-log-level'),
|
setLogLevel: () => calls.push('set-log-level'),
|
||||||
createMecabTokenizerAndCheck: async () => {
|
createMecabTokenizerAndCheck: async () => {
|
||||||
@@ -31,6 +36,9 @@ test('app-ready main deps builder returns mapped app-ready runtime deps', async
|
|||||||
loadYomitanExtension: async () => {
|
loadYomitanExtension: async () => {
|
||||||
calls.push('load-yomitan');
|
calls.push('load-yomitan');
|
||||||
},
|
},
|
||||||
|
handleFirstRunSetup: async () => {
|
||||||
|
calls.push('handle-first-run-setup');
|
||||||
|
},
|
||||||
prewarmSubtitleDictionaries: async () => {
|
prewarmSubtitleDictionaries: async () => {
|
||||||
calls.push('prewarm-dicts');
|
calls.push('prewarm-dicts');
|
||||||
},
|
},
|
||||||
@@ -49,6 +57,8 @@ test('app-ready main deps builder returns mapped app-ready runtime deps', async
|
|||||||
|
|
||||||
assert.equal(onReady.defaultSecondarySubMode, 'hover');
|
assert.equal(onReady.defaultSecondarySubMode, 'hover');
|
||||||
assert.equal(onReady.defaultWebsocketPort, 5174);
|
assert.equal(onReady.defaultWebsocketPort, 5174);
|
||||||
|
assert.equal(onReady.defaultAnnotationWebsocketPort, 6678);
|
||||||
|
assert.equal(onReady.defaultTexthookerPort, 5174);
|
||||||
assert.equal(onReady.texthookerOnlyMode, false);
|
assert.equal(onReady.texthookerOnlyMode, false);
|
||||||
assert.equal(onReady.shouldAutoInitializeOverlayRuntimeFromConfig(), true);
|
assert.equal(onReady.shouldAutoInitializeOverlayRuntimeFromConfig(), true);
|
||||||
assert.equal(onReady.now?.(), 123);
|
assert.equal(onReady.now?.(), 123);
|
||||||
@@ -57,8 +67,10 @@ test('app-ready main deps builder returns mapped app-ready runtime deps', async
|
|||||||
onReady.createMpvClient();
|
onReady.createMpvClient();
|
||||||
await onReady.createMecabTokenizerAndCheck();
|
await onReady.createMecabTokenizerAndCheck();
|
||||||
await onReady.loadYomitanExtension();
|
await onReady.loadYomitanExtension();
|
||||||
|
await onReady.handleFirstRunSetup();
|
||||||
await onReady.prewarmSubtitleDictionaries?.();
|
await onReady.prewarmSubtitleDictionaries?.();
|
||||||
onReady.startBackgroundWarmups();
|
onReady.startBackgroundWarmups();
|
||||||
|
onReady.startTexthooker(5174);
|
||||||
onReady.setVisibleOverlayVisible(true);
|
onReady.setVisibleOverlayVisible(true);
|
||||||
|
|
||||||
assert.deepEqual(calls, [
|
assert.deepEqual(calls, [
|
||||||
@@ -67,8 +79,10 @@ test('app-ready main deps builder returns mapped app-ready runtime deps', async
|
|||||||
'create-mpv-client',
|
'create-mpv-client',
|
||||||
'create-mecab',
|
'create-mecab',
|
||||||
'load-yomitan',
|
'load-yomitan',
|
||||||
|
'handle-first-run-setup',
|
||||||
'prewarm-dicts',
|
'prewarm-dicts',
|
||||||
'start-warmups',
|
'start-warmups',
|
||||||
|
'start-texthooker',
|
||||||
'set-visible-overlay',
|
'set-visible-overlay',
|
||||||
]);
|
]);
|
||||||
});
|
});
|
||||||
|
|||||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user