mirror of
https://github.com/ksyasuda/SubMiner.git
synced 2026-03-21 00:11:27 -07:00
Compare commits
13 Commits
| Author | SHA1 | Date | |
|---|---|---|---|
|
a34a7489db
|
|||
|
e59192bbe1
|
|||
|
e0f82d28f0
|
|||
|
a0521aeeaf
|
|||
|
2127f759ca
|
|||
|
5e787183d0
|
|||
|
81ca31b899
|
|||
|
e2a7597b4f
|
|||
|
2e59c21078
|
|||
|
7b5ab3294d
|
|||
|
2bbf38f987
|
|||
|
f09c91494d
|
|||
|
58ec9b76e0
|
59
.github/workflows/release.yml
vendored
59
.github/workflows/release.yml
vendored
@@ -10,7 +10,6 @@ concurrency:
|
|||||||
cancel-in-progress: false
|
cancel-in-progress: false
|
||||||
|
|
||||||
permissions:
|
permissions:
|
||||||
actions: read
|
|
||||||
contents: write
|
contents: write
|
||||||
|
|
||||||
jobs:
|
jobs:
|
||||||
@@ -104,8 +103,6 @@ jobs:
|
|||||||
|
|
||||||
- name: Build AppImage
|
- name: Build AppImage
|
||||||
run: bun run build:appimage
|
run: bun run build:appimage
|
||||||
env:
|
|
||||||
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
|
||||||
|
|
||||||
- name: Build unversioned AppImage
|
- name: Build unversioned AppImage
|
||||||
run: |
|
run: |
|
||||||
@@ -182,7 +179,6 @@ jobs:
|
|||||||
- name: Build signed + notarized macOS artifacts
|
- name: Build signed + notarized macOS artifacts
|
||||||
run: bun run build:mac
|
run: bun run build:mac
|
||||||
env:
|
env:
|
||||||
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
|
||||||
CSC_LINK: ${{ secrets.CSC_LINK }}
|
CSC_LINK: ${{ secrets.CSC_LINK }}
|
||||||
CSC_KEY_PASSWORD: ${{ secrets.CSC_KEY_PASSWORD }}
|
CSC_KEY_PASSWORD: ${{ secrets.CSC_KEY_PASSWORD }}
|
||||||
APPLE_ID: ${{ secrets.APPLE_ID }}
|
APPLE_ID: ${{ secrets.APPLE_ID }}
|
||||||
@@ -223,26 +219,6 @@ jobs:
|
|||||||
restore-keys: |
|
restore-keys: |
|
||||||
${{ runner.os }}-bun-
|
${{ runner.os }}-bun-
|
||||||
|
|
||||||
- name: Validate Windows signing secrets
|
|
||||||
shell: bash
|
|
||||||
run: |
|
|
||||||
missing=0
|
|
||||||
for name in SIGNPATH_API_TOKEN SIGNPATH_ORGANIZATION_ID SIGNPATH_PROJECT_SLUG SIGNPATH_SIGNING_POLICY_SLUG; do
|
|
||||||
if [ -z "${!name}" ]; then
|
|
||||||
echo "Missing required secret: $name"
|
|
||||||
missing=1
|
|
||||||
fi
|
|
||||||
done
|
|
||||||
if [ "$missing" -ne 0 ]; then
|
|
||||||
echo "Set the SignPath Windows signing secrets and rerun."
|
|
||||||
exit 1
|
|
||||||
fi
|
|
||||||
env:
|
|
||||||
SIGNPATH_API_TOKEN: ${{ secrets.SIGNPATH_API_TOKEN }}
|
|
||||||
SIGNPATH_ORGANIZATION_ID: ${{ secrets.SIGNPATH_ORGANIZATION_ID }}
|
|
||||||
SIGNPATH_PROJECT_SLUG: ${{ secrets.SIGNPATH_PROJECT_SLUG }}
|
|
||||||
SIGNPATH_SIGNING_POLICY_SLUG: ${{ secrets.SIGNPATH_SIGNING_POLICY_SLUG }}
|
|
||||||
|
|
||||||
- name: Install dependencies
|
- name: Install dependencies
|
||||||
run: bun install --frozen-lockfile
|
run: bun install --frozen-lockfile
|
||||||
|
|
||||||
@@ -254,40 +230,16 @@ jobs:
|
|||||||
bun run build
|
bun run build
|
||||||
|
|
||||||
- name: Build unsigned Windows artifacts
|
- name: Build unsigned Windows artifacts
|
||||||
run: bun run build:win
|
run: bun run build:win:unsigned
|
||||||
env:
|
|
||||||
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
|
||||||
|
|
||||||
- name: Upload unsigned Windows artifact for SignPath
|
- name: Upload Windows artifacts
|
||||||
id: upload-unsigned-windows-artifact
|
|
||||||
uses: actions/upload-artifact@v4
|
|
||||||
with:
|
|
||||||
name: unsigned-windows
|
|
||||||
path: |
|
|
||||||
release/*.exe
|
|
||||||
release/*.zip
|
|
||||||
if-no-files-found: error
|
|
||||||
|
|
||||||
- name: Submit Windows signing request
|
|
||||||
id: signpath-sign
|
|
||||||
uses: signpath/github-action-submit-signing-request@v2
|
|
||||||
with:
|
|
||||||
api-token: ${{ secrets.SIGNPATH_API_TOKEN }}
|
|
||||||
organization-id: ${{ secrets.SIGNPATH_ORGANIZATION_ID }}
|
|
||||||
project-slug: ${{ secrets.SIGNPATH_PROJECT_SLUG }}
|
|
||||||
signing-policy-slug: ${{ secrets.SIGNPATH_SIGNING_POLICY_SLUG }}
|
|
||||||
github-artifact-id: ${{ steps.upload-unsigned-windows-artifact.outputs.artifact-id }}
|
|
||||||
wait-for-completion: true
|
|
||||||
output-artifact-directory: signed-windows
|
|
||||||
github-token: ${{ secrets.GITHUB_TOKEN }}
|
|
||||||
|
|
||||||
- name: Upload signed Windows artifacts
|
|
||||||
uses: actions/upload-artifact@v4
|
uses: actions/upload-artifact@v4
|
||||||
with:
|
with:
|
||||||
name: windows
|
name: windows
|
||||||
path: |
|
path: |
|
||||||
signed-windows/*.exe
|
release/*.exe
|
||||||
signed-windows/*.zip
|
release/*.zip
|
||||||
|
if-no-files-found: error
|
||||||
|
|
||||||
release:
|
release:
|
||||||
needs: [build-linux, build-macos, build-windows]
|
needs: [build-linux, build-macos, build-windows]
|
||||||
@@ -380,6 +332,7 @@ jobs:
|
|||||||
if gh release view "${{ steps.version.outputs.VERSION }}" >/dev/null 2>&1; then
|
if gh release view "${{ steps.version.outputs.VERSION }}" >/dev/null 2>&1; then
|
||||||
# Do not pass the prerelease flag here; gh defaults to a normal release.
|
# Do not pass the prerelease flag here; gh defaults to a normal release.
|
||||||
gh release edit "${{ steps.version.outputs.VERSION }}" \
|
gh release edit "${{ steps.version.outputs.VERSION }}" \
|
||||||
|
--draft=false \
|
||||||
--title "${{ steps.version.outputs.VERSION }}" \
|
--title "${{ steps.version.outputs.VERSION }}" \
|
||||||
--notes-file release/release-notes.md
|
--notes-file release/release-notes.md
|
||||||
else
|
else
|
||||||
|
|||||||
1
.gitignore
vendored
1
.gitignore
vendored
@@ -37,3 +37,4 @@ tests/*
|
|||||||
.worktrees/
|
.worktrees/
|
||||||
.codex/*
|
.codex/*
|
||||||
.agents/*
|
.agents/*
|
||||||
|
favicon.png
|
||||||
|
|||||||
53
CHANGELOG.md
53
CHANGELOG.md
@@ -1,6 +1,52 @@
|
|||||||
# Changelog
|
# Changelog
|
||||||
|
|
||||||
|
## v0.5.5 (2026-03-09)
|
||||||
|
|
||||||
|
### Changed
|
||||||
|
|
||||||
|
- Overlay: Added `f` as the default overlay fullscreen toggle and changed the default AniSkip intro-jump key to `Tab`.
|
||||||
|
- Dictionary: Aligned AniList character dictionary generation more closely with the upstream reference by preserving duplicate shared names across characters, skipping characters without native Japanese names, restoring richer character info fields, and using upstream-style role mapping plus hint-aware kanji readings.
|
||||||
|
- Startup: Ordered startup OSD messages so tokenization loads first, annotation loading appears next if still pending, and character dictionary sync progress waits until annotation loading finishes.
|
||||||
|
- Dictionary: Added a visible startup OSD step for merged character-dictionary building so long rebuilds show progress before the later import/upload phase.
|
||||||
|
|
||||||
|
### Fixed
|
||||||
|
|
||||||
|
- Dictionary: Fixed AniList media guessing for character dictionary auto-sync by using filename-only `guessit` input and preserving multi-part guessit titles instead of truncating them to the first segment.
|
||||||
|
- Dictionary: Refresh the current subtitle after character dictionary auto-sync completes so newly imported character names highlight on the active line instead of waiting for the next subtitle change.
|
||||||
|
- Dictionary: Show character dictionary auto-sync progress on the mpv OSD without sending desktop notifications.
|
||||||
|
- Dictionary: Keep character dictionary auto-sync non-blocking during startup by letting snapshot/build work run in parallel and delaying only the Yomitan import/settings phase until current-media tokenization is already ready.
|
||||||
|
- Overlay: Fixed visible overlay keyboard handling so pressing `Tab` still reaches mpv and triggers the default AniSkip skip-intro binding while the overlay has focus.
|
||||||
|
- Plugin: Fix Windows mpv plugin binary override lookup so `SUBMINER_BINARY_PATH` still resolves to `SubMiner.exe` when no AppImage override is set.
|
||||||
|
|
||||||
|
## v0.5.3 (2026-03-09)
|
||||||
|
|
||||||
|
### Changed
|
||||||
|
|
||||||
|
- Release: Publish unsigned Windows `.exe` and `.zip` artifacts directly from release CI instead of routing them through SignPath.
|
||||||
|
- Release: Added `bun run build:win:unsigned` for explicit local unsigned Windows packaging.
|
||||||
|
|
||||||
|
## v0.5.2 (2026-03-09)
|
||||||
|
|
||||||
|
### Internal
|
||||||
|
|
||||||
|
- Release: Pinned the Windows SignPath submission workflow to an explicit artifact-configuration slug instead of relying on the SignPath project's default configuration.
|
||||||
|
|
||||||
|
## v0.5.1 (2026-03-09)
|
||||||
|
|
||||||
|
### Changed
|
||||||
|
|
||||||
|
- Launcher: Removed the YouTube subtitle generation mode switch so YouTube playback always preloads subtitles before mpv starts.
|
||||||
|
|
||||||
|
### Fixed
|
||||||
|
|
||||||
|
- Launcher: Hardened YouTube AI subtitle fixing so fenced SRT output and text-only one-cue-per-block responses can still be applied without losing original cue timing.
|
||||||
|
- Launcher: Skipped AniSkip lookup during URL playback and YouTube subtitle-preload playback, limiting AniSkip to local file targets where it can actually resolve anime metadata.
|
||||||
|
- Launcher: Keep the background SubMiner process running after a launcher-managed mpv session exits so the next mpv instance can reconnect without restarting the app.
|
||||||
|
- Launcher: Reuse prior tokenization readiness after the background app is already warm so reopening a video does not pause again waiting for duplicate warmup completion.
|
||||||
|
- Windows: Acquire the app single-instance lock earlier so Windows overlay/video launches reuse the running background SubMiner process instead of booting a second full app and repeating startup warmups.
|
||||||
|
|
||||||
## v0.3.0 (2026-03-05)
|
## v0.3.0 (2026-03-05)
|
||||||
|
|
||||||
- Added keyboard-driven Yomitan navigation and popup controls, including optional auto-pause.
|
- Added keyboard-driven Yomitan navigation and popup controls, including optional auto-pause.
|
||||||
- Added subtitle/jump keyboard handling fixes for smoother subtitle playback control.
|
- Added subtitle/jump keyboard handling fixes for smoother subtitle playback control.
|
||||||
- Improved Anki/Yomitan reliability with stronger Yomitan proxy syncing and safer extension refresh logic.
|
- Improved Anki/Yomitan reliability with stronger Yomitan proxy syncing and safer extension refresh logic.
|
||||||
@@ -11,6 +57,7 @@
|
|||||||
- Removed docs Plausible integration and cleaned associated tracker settings.
|
- Removed docs Plausible integration and cleaned associated tracker settings.
|
||||||
|
|
||||||
## v0.2.3 (2026-03-02)
|
## v0.2.3 (2026-03-02)
|
||||||
|
|
||||||
- Added performance and tokenization optimizations (faster warmup, persistent MeCab usage, reduced enrichment lookups).
|
- Added performance and tokenization optimizations (faster warmup, persistent MeCab usage, reduced enrichment lookups).
|
||||||
- Added subtitle controls for no-jump delay shifts.
|
- Added subtitle controls for no-jump delay shifts.
|
||||||
- Improved subtitle highlight logic with priority and reliability fixes.
|
- Improved subtitle highlight logic with priority and reliability fixes.
|
||||||
@@ -19,30 +66,36 @@
|
|||||||
- Updated startup flow to load dictionaries asynchronously and unblock first tokenization sooner.
|
- Updated startup flow to load dictionaries asynchronously and unblock first tokenization sooner.
|
||||||
|
|
||||||
## v0.2.2 (2026-03-01)
|
## v0.2.2 (2026-03-01)
|
||||||
|
|
||||||
- Improved subtitle highlighting reliability for frequency modes.
|
- Improved subtitle highlighting reliability for frequency modes.
|
||||||
- Fixed Jellyfin misc info formatting cleanup.
|
- Fixed Jellyfin misc info formatting cleanup.
|
||||||
- Version bump maintenance for 0.2.2.
|
- Version bump maintenance for 0.2.2.
|
||||||
|
|
||||||
## v0.2.1 (2026-03-01)
|
## v0.2.1 (2026-03-01)
|
||||||
|
|
||||||
- Delivered Jellyfin and Subsync fixes from release patch cycle.
|
- Delivered Jellyfin and Subsync fixes from release patch cycle.
|
||||||
- Version bump maintenance for 0.2.1.
|
- Version bump maintenance for 0.2.1.
|
||||||
|
|
||||||
## v0.2.0 (2026-03-01)
|
## v0.2.0 (2026-03-01)
|
||||||
|
|
||||||
- Added task-related release work for the overlay 2.0 cycle.
|
- Added task-related release work for the overlay 2.0 cycle.
|
||||||
- Introduced Overlay 2.0.
|
- Introduced Overlay 2.0.
|
||||||
- Improved release automation reliability.
|
- Improved release automation reliability.
|
||||||
|
|
||||||
## v0.1.2 (2026-02-24)
|
## v0.1.2 (2026-02-24)
|
||||||
|
|
||||||
- Added encrypted AniList token handling and default GNOME keyring support.
|
- Added encrypted AniList token handling and default GNOME keyring support.
|
||||||
- Added launcher passthrough for password-store flows (Jellyfin path).
|
- Added launcher passthrough for password-store flows (Jellyfin path).
|
||||||
- Updated docs for auth and integration behavior.
|
- Updated docs for auth and integration behavior.
|
||||||
- Version bump maintenance for 0.1.2.
|
- Version bump maintenance for 0.1.2.
|
||||||
|
|
||||||
## v0.1.1 (2026-02-23)
|
## v0.1.1 (2026-02-23)
|
||||||
|
|
||||||
- Fixed overlay modal focus handling (`grab input`) behavior.
|
- Fixed overlay modal focus handling (`grab input`) behavior.
|
||||||
- Version bump maintenance for 0.1.1.
|
- Version bump maintenance for 0.1.1.
|
||||||
|
|
||||||
## v0.1.0 (2026-02-23)
|
## v0.1.0 (2026-02-23)
|
||||||
|
|
||||||
- Bootstrapped Electron runtime, services, and composition model.
|
- Bootstrapped Electron runtime, services, and composition model.
|
||||||
- Added runtime asset packaging and dependency vendoring.
|
- Added runtime asset packaging and dependency vendoring.
|
||||||
- Added project docs baseline, setup guides, architecture notes, and submodule/runtime assets.
|
- Added project docs baseline, setup guides, architecture notes, and submodule/runtime assets.
|
||||||
|
|||||||
21
README.md
21
README.md
@@ -20,8 +20,6 @@
|
|||||||
|
|
||||||
<br />
|
<br />
|
||||||
|
|
||||||
Initial packaged Windows support is now available alongside the existing Linux and macOS builds.
|
|
||||||
|
|
||||||
## What it does
|
## What it does
|
||||||
|
|
||||||
SubMiner is an Electron overlay that sits on top of mpv. It turns your video player into a full sentence-mining workstation:
|
SubMiner is an Electron overlay that sits on top of mpv. It turns your video player into a full sentence-mining workstation:
|
||||||
@@ -31,7 +29,7 @@ SubMiner is an Electron overlay that sits on top of mpv. It turns your video pla
|
|||||||
- **One-key mining** — Creates Anki cards with sentence, audio, screenshot, and translation
|
- **One-key mining** — Creates Anki cards with sentence, audio, screenshot, and translation
|
||||||
- **Instant auto-enrichment** — Optional local AnkiConnect proxy enriches new Yomitan cards immediately
|
- **Instant auto-enrichment** — Optional local AnkiConnect proxy enriches new Yomitan cards immediately
|
||||||
- **Reading annotations** — Combines N+1 targeting, frequency-dictionary highlighting, and JLPT underlining while you read
|
- **Reading annotations** — Combines N+1 targeting, frequency-dictionary highlighting, and JLPT underlining while you read
|
||||||
- **Hover-aware playback** — By default, hovering subtitle text pauses mpv and resumes on mouse leave (`subtitleStyle.autoPauseVideoOnHover`)
|
- **Hover-aware playback** — By default, hovering subtitle text pauses mpv and resumes on mouse leave
|
||||||
- **Subtitle tools** — Download from Jimaku, sync with alass/ffsubsync
|
- **Subtitle tools** — Download from Jimaku, sync with alass/ffsubsync
|
||||||
- **Immersion tracking** — SQLite-powered stats on your watch time and mining activity
|
- **Immersion tracking** — SQLite-powered stats on your watch time and mining activity
|
||||||
- **Custom texthooker page** — Built-in custom texthooker page and websocket, no extra setup
|
- **Custom texthooker page** — Built-in custom texthooker page and websocket, no extra setup
|
||||||
@@ -71,7 +69,7 @@ SubMiner.AppImage
|
|||||||
|
|
||||||
On macOS, launch `SubMiner.app`. On Windows, launch `SubMiner.exe` from the Start menu or install directory.
|
On macOS, launch `SubMiner.app`. On Windows, launch `SubMiner.exe` from the Start menu or install directory.
|
||||||
|
|
||||||
On first launch, SubMiner now:
|
On first launch, SubMiner:
|
||||||
|
|
||||||
- starts in the tray/background
|
- starts in the tray/background
|
||||||
- creates the default config directory and `config.jsonc`
|
- creates the default config directory and `config.jsonc`
|
||||||
@@ -79,8 +77,6 @@ On first launch, SubMiner now:
|
|||||||
- can install the mpv plugin to the default mpv scripts location for you
|
- can install the mpv plugin to the default mpv scripts location for you
|
||||||
- links directly to Yomitan settings so you can install dictionaries before finishing setup
|
- links directly to Yomitan settings so you can install dictionaries before finishing setup
|
||||||
|
|
||||||
Existing installs that already have a valid config plus at least one Yomitan dictionary are auto-detected as complete and will not be re-prompted.
|
|
||||||
|
|
||||||
### 3. Finish setup
|
### 3. Finish setup
|
||||||
|
|
||||||
- click `Install mpv plugin` if you want the default plugin auto-start flow
|
- click `Install mpv plugin` if you want the default plugin auto-start flow
|
||||||
@@ -114,20 +110,9 @@ Windows builds use native window tracking and do not require the Linux composito
|
|||||||
|
|
||||||
For full guides on configuration, Anki, Jellyfin, and more, see [docs.subminer.moe](https://docs.subminer.moe).
|
For full guides on configuration, Anki, Jellyfin, and more, see [docs.subminer.moe](https://docs.subminer.moe).
|
||||||
|
|
||||||
## Testing
|
|
||||||
|
|
||||||
- Run `bun run test` or `bun run test:fast` for the default fast lane: config/core coverage plus representative entry/runtime, Anki integration, and main runtime checks.
|
|
||||||
- Run `bun run test:full` for the maintained test surface: Bun-compatible `src/**` coverage, Bun-compatible launcher unit coverage, and the maintained dist compatibility slice for `ipc`, `anki-jimaku-ipc`, `overlay-manager`, `config-validation`, `startup-config`, and runtime registry coverage.
|
|
||||||
- Run `bun run test:node:compat` directly when you only need that dist compatibility slice. The command name is legacy; it now runs under Bun.
|
|
||||||
- Run `bun run test:env` for environment-specific verification: launcher smoke/plugin checks plus the SQLite-backed immersion tracker lane.
|
|
||||||
- Run `bun run test:immersion:sqlite` when you specifically need the dist SQLite persistence coverage.
|
|
||||||
- Run `bun run test:subtitle` for the maintained `alass`/`ffsubsync` subtitle surface.
|
|
||||||
|
|
||||||
The Bun-managed discovery lanes intentionally exclude a small set of suites from the source-file discovery pass and keep them in the maintained dist compatibility slice instead: Electron-focused tests in `src/core/services/ipc.test.ts`, `src/core/services/anki-jimaku-ipc.test.ts`, and `src/core/services/overlay-manager.test.ts`, plus runtime/config tests in `src/main/config-validation.test.ts`, `src/main/runtime/startup-config.test.ts`, and `src/main/runtime/registry.test.ts`. `bun run test:node:compat` keeps those suites in the standard workflow instead of leaving them untracked.
|
|
||||||
|
|
||||||
## Acknowledgments
|
## Acknowledgments
|
||||||
|
|
||||||
Built on the shoulders of [GameSentenceMiner](https://github.com/bpwhelan/GameSentenceMiner), [Renji's Texthooker Page](https://github.com/Renji-XD/texthooker-ui), [mpvacious](https://github.com/Ajatt-Tools/mpvacious), [Anacreon-Script](https://github.com/friedrich-de/Anacreon-Script), and [Bee's Character Dictionary](https://github.com/bee-san/Japanese_Character_Name_Dictionary). Subtitles powered by [Jimaku.cc](https://jimaku.cc). Dictionary lookups via [Yomitan](https://github.com/yomidevs/yomitan).
|
Built on the shoulders of [GameSentenceMiner](https://github.com/bpwhelan/GameSentenceMiner), [Renji's Texthooker Page](https://github.com/Renji-XD/texthooker-ui), [Anacreon-Script](https://github.com/friedrich-de/Anacreon-Script), and [Bee's Character Dictionary](https://github.com/bee-san/Japanese_Character_Name_Dictionary). Subtitles powered by [Jimaku.cc](https://jimaku.cc). Dictionary lookups via [Yomitan](https://github.com/yomidevs/yomitan), and JLPT tags from [yomitan-jlpt-vocab](https://github.com/stephenmk/yomitan-jlpt-vocab).
|
||||||
|
|
||||||
## License
|
## License
|
||||||
|
|
||||||
|
|||||||
@@ -0,0 +1,80 @@
|
|||||||
|
---
|
||||||
|
id: TASK-131
|
||||||
|
title: Make default overlay fullscreen and AniSkip end-jump keybindings easier to reach
|
||||||
|
status: Done
|
||||||
|
assignee:
|
||||||
|
- codex
|
||||||
|
created_date: '2026-03-09 00:00'
|
||||||
|
updated_date: '2026-03-09 00:30'
|
||||||
|
labels:
|
||||||
|
- enhancement
|
||||||
|
- overlay
|
||||||
|
- mpv
|
||||||
|
- aniskip
|
||||||
|
dependencies: []
|
||||||
|
---
|
||||||
|
|
||||||
|
## Description
|
||||||
|
|
||||||
|
<!-- SECTION:DESCRIPTION:BEGIN -->
|
||||||
|
|
||||||
|
Make two default keyboard actions easier to hit during playback: add `f` as the built-in overlay fullscreen toggle, and make AniSkip's default intro-end jump use `Tab`.
|
||||||
|
|
||||||
|
<!-- SECTION:DESCRIPTION:END -->
|
||||||
|
|
||||||
|
## Acceptance Criteria
|
||||||
|
|
||||||
|
<!-- AC:BEGIN -->
|
||||||
|
|
||||||
|
- [x] #1 Default overlay keybindings include `KeyF` mapped to mpv fullscreen toggle.
|
||||||
|
- [x] #2 Default AniSkip hint/button key defaults to `Tab` and the plugin registers that binding.
|
||||||
|
- [x] #3 Automated regression coverage exists for both default bindings.
|
||||||
|
<!-- AC:END -->
|
||||||
|
|
||||||
|
## Implementation Plan
|
||||||
|
|
||||||
|
<!-- SECTION:PLAN:BEGIN -->
|
||||||
|
|
||||||
|
1. Add a failing TypeScript regression proving default overlay keybindings include fullscreen on `KeyF`.
|
||||||
|
2. Add a failing Lua/plugin regression proving AniSkip defaults to `Tab`, updates the OSD hint text, and registers the expected keybinding.
|
||||||
|
3. Patch the default keybinding/config values with minimal behavior changes and keep fallback binding behavior intentional.
|
||||||
|
4. Run focused tests plus touched verification commands, then record results and a short changelog fragment.
|
||||||
|
<!-- SECTION:PLAN:END -->
|
||||||
|
|
||||||
|
## Implementation Notes
|
||||||
|
|
||||||
|
<!-- SECTION:NOTES:BEGIN -->
|
||||||
|
|
||||||
|
Added `KeyF -> ['cycle', 'fullscreen']` to the built-in overlay keybindings in `src/config/definitions/shared.ts`.
|
||||||
|
|
||||||
|
Changed the mpv plugin AniSkip default button key from `y-k` to `TAB` in both the runtime default options and the shipped `plugin/subminer.conf`. The AniSkip OSD hint now also falls back to `TAB` when no explicit key is configured.
|
||||||
|
|
||||||
|
Adjusted `plugin/subminer/ui.lua` fallback registration so the legacy `y-k` binding is only added for custom non-default AniSkip bindings, instead of always shadowing the new default.
|
||||||
|
|
||||||
|
Extended regression coverage:
|
||||||
|
|
||||||
|
- `src/config/definitions/domain-registry.test.ts` now asserts the default fullscreen binding on `KeyF`.
|
||||||
|
- `scripts/test-plugin-start-gate.lua` now isolates plugin runs correctly, records keybinding/observer registration, and asserts the default AniSkip keybinding/prompt behavior for `TAB`.
|
||||||
|
|
||||||
|
Verification:
|
||||||
|
|
||||||
|
- `bun test src/config/definitions/domain-registry.test.ts`
|
||||||
|
- `bun run test:config:src`
|
||||||
|
- `lua scripts/test-plugin-start-gate.lua`
|
||||||
|
- `bun run changelog:lint`
|
||||||
|
- `bun run typecheck`
|
||||||
|
|
||||||
|
Known unrelated verification gap:
|
||||||
|
|
||||||
|
- `bun run test:plugin:src` still fails in `scripts/test-plugin-binary-windows.lua` on this Linux host (`windows env override should resolve .exe suffix`), outside the keybinding changes in this task.
|
||||||
|
<!-- SECTION:NOTES:END -->
|
||||||
|
|
||||||
|
## Final Summary
|
||||||
|
|
||||||
|
<!-- SECTION:FINAL_SUMMARY:BEGIN -->
|
||||||
|
|
||||||
|
Default overlay playback now has an easier fullscreen toggle on `f`, and AniSkip's default intro-end jump now uses `Tab`. The mpv plugin hint text and registration logic were updated to match the new default, while keeping legacy `y-k` fallback behavior limited to custom non-default bindings.
|
||||||
|
|
||||||
|
Regression coverage was added for both defaults, and the plugin test harness now resets plugin bootstrap state between scenarios so keybinding assertions can run reliably.
|
||||||
|
|
||||||
|
<!-- SECTION:FINAL_SUMMARY:END -->
|
||||||
@@ -0,0 +1,66 @@
|
|||||||
|
---
|
||||||
|
id: TASK-134
|
||||||
|
title: Harden Windows release signing against transient SignPath failures
|
||||||
|
status: Done
|
||||||
|
assignee:
|
||||||
|
- codex
|
||||||
|
created_date: '2026-03-09 00:00'
|
||||||
|
updated_date: '2026-03-08 20:23'
|
||||||
|
labels:
|
||||||
|
- ci
|
||||||
|
- release
|
||||||
|
- windows
|
||||||
|
- signing
|
||||||
|
dependencies: []
|
||||||
|
references:
|
||||||
|
- .github/workflows/release.yml
|
||||||
|
- package.json
|
||||||
|
- src/release-workflow.test.ts
|
||||||
|
- https://github.com/ksyasuda/SubMiner/actions/runs/22836585479
|
||||||
|
priority: high
|
||||||
|
---
|
||||||
|
|
||||||
|
## Description
|
||||||
|
|
||||||
|
<!-- SECTION:DESCRIPTION:BEGIN -->
|
||||||
|
The tag-driven Release workflow currently fails the Windows lane if the SignPath connector returns transient 502 errors during submission, and the tagged build scripts also allow electron-builder to implicitly publish unsigned artifacts before the final release job runs. Harden the workflow so transient SignPath outages get bounded retries and release packaging never auto-publishes unsigned assets.
|
||||||
|
<!-- SECTION:DESCRIPTION:END -->
|
||||||
|
|
||||||
|
## Acceptance Criteria
|
||||||
|
<!-- AC:BEGIN -->
|
||||||
|
- [ ] #1 Windows release signing retries transient SignPath submission failures within the release workflow before failing the job.
|
||||||
|
- [ ] #2 Release packaging scripts disable electron-builder implicit publish so build jobs do not upload unsigned assets on tag builds.
|
||||||
|
- [ ] #3 Regression coverage fails if SignPath retry scaffolding or publish suppression is removed.
|
||||||
|
<!-- AC:END -->
|
||||||
|
|
||||||
|
## Implementation Plan
|
||||||
|
|
||||||
|
<!-- SECTION:PLAN:BEGIN -->
|
||||||
|
1. Add a regression test for the release workflow/package script shape covering SignPath retries and `--publish never`.
|
||||||
|
2. Patch the Windows release job to retry SignPath submission a bounded number of times and still fail hard if every attempt fails.
|
||||||
|
3. Update tagged package build scripts to disable implicit electron-builder publishing during release builds.
|
||||||
|
4. Run targeted release-workflow verification and capture any remaining manual release cleanup needed for `v0.5.0`.
|
||||||
|
<!-- SECTION:PLAN:END -->
|
||||||
|
|
||||||
|
## Implementation Notes
|
||||||
|
|
||||||
|
<!-- SECTION:NOTES:BEGIN -->
|
||||||
|
The failed Windows signing step in GitHub Actions run `22836585479` was not caused by missing secrets or an artifact-shape mismatch. The SignPath GitHub action retried repeated `502` responses from the SignPath connector for several minutes and then failed the job.
|
||||||
|
|
||||||
|
Hardened `.github/workflows/release.yml` by replacing the single SignPath submission with three bounded attempts. The second and third submissions only run if the previous attempt failed, and the job now fails with an explicit rerun message only after all three attempts fail. Signed-artifact upload is keyed to the successful attempt so the release job still consumes the normal `windows` artifact name.
|
||||||
|
|
||||||
|
Also fixed a separate release regression exposed by the same run: `electron-builder` was implicitly publishing unsigned release assets during tag builds because the packaging scripts did not set `--publish never` and the workflow injected `GH_TOKEN` into build jobs. Updated the relevant package scripts to pass `--publish never`, removed `GH_TOKEN` from the packaging jobs, and made the final publish step force `--draft=false` when editing an existing tag release so previously-created draft releases get published.
|
||||||
|
|
||||||
|
Verification: `bun test src/release-workflow.test.ts`, `bun run typecheck`, and `bun run test:fast` all passed locally after restoring the missing local `libsql` install with `bun install --frozen-lockfile`.
|
||||||
|
<!-- SECTION:NOTES:END -->
|
||||||
|
|
||||||
|
## Final Summary
|
||||||
|
|
||||||
|
<!-- SECTION:FINAL_SUMMARY:BEGIN -->
|
||||||
|
Windows release signing is now resilient to transient SignPath connector outages. The release workflow retries the SignPath submission up to three times before failing, and only uploads the signed Windows artifact from the attempt that succeeded.
|
||||||
|
|
||||||
|
Release packaging also no longer auto-publishes unsigned assets on tag builds. The `electron-builder` scripts now force `--publish never`, the build jobs no longer pass `GH_TOKEN` into packaging steps, and the final GitHub release publish step explicitly clears draft state when updating an existing tag release.
|
||||||
|
|
||||||
|
Validation: `bun test src/release-workflow.test.ts`, `bun run typecheck`, `bun run test:fast`.
|
||||||
|
Manual follow-up for the failed `v0.5.0` release: rerun the `Release` workflow after merging/pushing this fix, then clean up the stray draft/untagged release assets created by the failed run if they remain.
|
||||||
|
<!-- SECTION:FINAL_SUMMARY:END -->
|
||||||
@@ -0,0 +1,57 @@
|
|||||||
|
---
|
||||||
|
id: TASK-135
|
||||||
|
title: Cut patch release v0.5.1 for Windows signing fix
|
||||||
|
status: Done
|
||||||
|
assignee:
|
||||||
|
- codex
|
||||||
|
created_date: '2026-03-08 20:24'
|
||||||
|
updated_date: '2026-03-08 20:28'
|
||||||
|
labels:
|
||||||
|
- release
|
||||||
|
- patch
|
||||||
|
dependencies:
|
||||||
|
- TASK-134
|
||||||
|
references:
|
||||||
|
- package.json
|
||||||
|
- CHANGELOG.md
|
||||||
|
- release/release-notes.md
|
||||||
|
priority: high
|
||||||
|
---
|
||||||
|
|
||||||
|
## Description
|
||||||
|
|
||||||
|
<!-- SECTION:DESCRIPTION:BEGIN -->
|
||||||
|
Publish a patch release from the workflow-signing fix on `main` by bumping the app version, generating the committed changelog artifacts for the new version, and pushing a new `v0.5.1` tag instead of rewriting the failed `v0.5.0` tag.
|
||||||
|
<!-- SECTION:DESCRIPTION:END -->
|
||||||
|
|
||||||
|
## Acceptance Criteria
|
||||||
|
<!-- AC:BEGIN -->
|
||||||
|
- [ ] #1 Repository version metadata is updated to `0.5.1`.
|
||||||
|
- [ ] #2 `CHANGELOG.md` and `release/release-notes.md` contain the committed `v0.5.1` section and released fragments are removed.
|
||||||
|
- [ ] #3 New `v0.5.1` commit and tag are pushed to `origin`.
|
||||||
|
<!-- AC:END -->
|
||||||
|
|
||||||
|
## Implementation Plan
|
||||||
|
|
||||||
|
<!-- SECTION:PLAN:BEGIN -->
|
||||||
|
1. Bump the package version to `0.5.1`.
|
||||||
|
2. Run the changelog builder so `CHANGELOG.md`/`release-notes.md` match the release workflow contract.
|
||||||
|
3. Run the relevant verification commands.
|
||||||
|
4. Commit the release-prep changes, create `v0.5.1`, and push both commit and tag.
|
||||||
|
<!-- SECTION:PLAN:END -->
|
||||||
|
|
||||||
|
## Implementation Notes
|
||||||
|
|
||||||
|
<!-- SECTION:NOTES:BEGIN -->
|
||||||
|
Bumped `package.json` from `0.5.0` to `0.5.1`, then ran `bun run changelog:build` so the committed release artifacts match the release workflow contract. That prepended the `v0.5.1` section to `CHANGELOG.md`, regenerated `release/release-notes.md`, and removed the consumed changelog fragments from `changes/`.
|
||||||
|
|
||||||
|
Verification before tagging: `bun run changelog:lint`, `bun run changelog:check --version 0.5.1`, `bun run typecheck`, and `bun run test:fast`.
|
||||||
|
<!-- SECTION:NOTES:END -->
|
||||||
|
|
||||||
|
## Final Summary
|
||||||
|
|
||||||
|
<!-- SECTION:FINAL_SUMMARY:BEGIN -->
|
||||||
|
Prepared patch release `v0.5.1` from the signing-workflow fix on `main` instead of rewriting the failed `v0.5.0` tag. Repository version metadata, changelog, and committed release notes are all aligned with the new release tag, and the consumed changelog fragments were removed.
|
||||||
|
|
||||||
|
Validation: `bun run changelog:lint`, `bun run changelog:check --version 0.5.1`, `bun run typecheck`, `bun run test:fast`.
|
||||||
|
<!-- SECTION:FINAL_SUMMARY:END -->
|
||||||
@@ -0,0 +1,61 @@
|
|||||||
|
---
|
||||||
|
id: TASK-136
|
||||||
|
title: Pin SignPath artifact configuration in release workflow
|
||||||
|
status: Done
|
||||||
|
assignee:
|
||||||
|
- codex
|
||||||
|
created_date: '2026-03-08 20:41'
|
||||||
|
updated_date: '2026-03-08 20:58'
|
||||||
|
labels:
|
||||||
|
- ci
|
||||||
|
- release
|
||||||
|
- windows
|
||||||
|
- signing
|
||||||
|
dependencies:
|
||||||
|
- TASK-134
|
||||||
|
references:
|
||||||
|
- .github/workflows/release.yml
|
||||||
|
- build/signpath-windows-artifact-config.xml
|
||||||
|
- src/release-workflow.test.ts
|
||||||
|
priority: high
|
||||||
|
---
|
||||||
|
|
||||||
|
## Description
|
||||||
|
|
||||||
|
<!-- SECTION:DESCRIPTION:BEGIN -->
|
||||||
|
The Windows release workflow currently relies on the default SignPath artifact configuration configured in the SignPath UI. Pin the workflow to an explicit artifact-configuration slug so the checked-in signing configuration and CI behavior stay deterministic across future SignPath project changes.
|
||||||
|
<!-- SECTION:DESCRIPTION:END -->
|
||||||
|
|
||||||
|
## Acceptance Criteria
|
||||||
|
<!-- AC:BEGIN -->
|
||||||
|
- [x] #1 The Windows release workflow validates a dedicated SignPath artifact-configuration secret/input.
|
||||||
|
- [x] #2 Every SignPath submission attempt passes `artifact-configuration-slug`.
|
||||||
|
- [x] #3 Regression coverage fails if the explicit SignPath artifact-configuration binding is removed.
|
||||||
|
<!-- AC:END -->
|
||||||
|
|
||||||
|
## Implementation Plan
|
||||||
|
|
||||||
|
<!-- SECTION:PLAN:BEGIN -->
|
||||||
|
1. Add a failing workflow regression test for the explicit SignPath artifact-configuration slug.
|
||||||
|
2. Patch the Windows signing secret validation and SignPath action inputs to require the slug.
|
||||||
|
3. Run targeted release-workflow verification plus the standard fast lane.
|
||||||
|
4. Cut a new patch release so the tag-triggered release workflow runs with the pinned SignPath configuration.
|
||||||
|
<!-- SECTION:PLAN:END -->
|
||||||
|
|
||||||
|
## Implementation Notes
|
||||||
|
|
||||||
|
<!-- SECTION:NOTES:BEGIN -->
|
||||||
|
Added regression coverage in `src/release-workflow.test.ts` for an explicit SignPath artifact-configuration slug so the release workflow test now fails if the slug validation or action input is removed.
|
||||||
|
|
||||||
|
Patched `.github/workflows/release.yml` so Windows signing now requires `SIGNPATH_ARTIFACT_CONFIGURATION_SLUG` during secret validation and passes `artifact-configuration-slug: ${{ secrets.SIGNPATH_ARTIFACT_CONFIGURATION_SLUG }}` on every SignPath submission attempt.
|
||||||
|
|
||||||
|
Verification: `bun test src/release-workflow.test.ts`, `bun run typecheck`, `bun run test:fast`.
|
||||||
|
<!-- SECTION:NOTES:END -->
|
||||||
|
|
||||||
|
## Final Summary
|
||||||
|
|
||||||
|
<!-- SECTION:FINAL_SUMMARY:BEGIN -->
|
||||||
|
The release workflow is now pinned to an explicit SignPath artifact configuration instead of relying on whichever SignPath artifact config is marked default in the UI. Windows signing secret validation fails fast if `SIGNPATH_ARTIFACT_CONFIGURATION_SLUG` is missing, and every SignPath submission attempt now includes the pinned slug.
|
||||||
|
|
||||||
|
Validation: `bun test src/release-workflow.test.ts`, `bun run typecheck`, `bun run test:fast`.
|
||||||
|
<!-- SECTION:FINAL_SUMMARY:END -->
|
||||||
@@ -0,0 +1,57 @@
|
|||||||
|
---
|
||||||
|
id: TASK-137
|
||||||
|
title: Cut patch release v0.5.2 for SignPath artifact config pinning
|
||||||
|
status: Done
|
||||||
|
assignee:
|
||||||
|
- codex
|
||||||
|
created_date: '2026-03-08 20:44'
|
||||||
|
updated_date: '2026-03-08 20:58'
|
||||||
|
labels:
|
||||||
|
- release
|
||||||
|
- patch
|
||||||
|
dependencies:
|
||||||
|
- TASK-136
|
||||||
|
references:
|
||||||
|
- package.json
|
||||||
|
- CHANGELOG.md
|
||||||
|
- release/release-notes.md
|
||||||
|
priority: high
|
||||||
|
---
|
||||||
|
|
||||||
|
## Description
|
||||||
|
|
||||||
|
<!-- SECTION:DESCRIPTION:BEGIN -->
|
||||||
|
Publish a patch release from the SignPath artifact-configuration pinning change by bumping the app version, generating the committed changelog artifacts for the new version, and pushing a new `v0.5.2` tag.
|
||||||
|
<!-- SECTION:DESCRIPTION:END -->
|
||||||
|
|
||||||
|
## Acceptance Criteria
|
||||||
|
<!-- AC:BEGIN -->
|
||||||
|
- [x] #1 Repository version metadata is updated to `0.5.2`.
|
||||||
|
- [x] #2 `CHANGELOG.md` and `release/release-notes.md` contain the committed `v0.5.2` section and consumed fragments are removed.
|
||||||
|
- [x] #3 New `v0.5.2` commit and tag are pushed to `origin`.
|
||||||
|
<!-- AC:END -->
|
||||||
|
|
||||||
|
## Implementation Plan
|
||||||
|
|
||||||
|
<!-- SECTION:PLAN:BEGIN -->
|
||||||
|
1. Add the release fragment for the SignPath configuration pinning change.
|
||||||
|
2. Bump `package.json` to `0.5.2` and run the changelog builder.
|
||||||
|
3. Run changelog/typecheck/test verification.
|
||||||
|
4. Commit the release-prep change set, create `v0.5.2`, and push commit plus tag.
|
||||||
|
<!-- SECTION:PLAN:END -->
|
||||||
|
|
||||||
|
## Implementation Notes
|
||||||
|
|
||||||
|
<!-- SECTION:NOTES:BEGIN -->
|
||||||
|
Bumped `package.json` from `0.5.1` to `0.5.2`, ran `bun run changelog:build`, and committed the generated release artifacts. That prepended the `v0.5.2` section to `CHANGELOG.md`, regenerated `release/release-notes.md`, and removed the consumed `changes/signpath-artifact-config-pin.md` fragment.
|
||||||
|
|
||||||
|
Verification before tagging: `bun run changelog:lint`, `bun run changelog:check --version 0.5.2`, `bun run typecheck`, and `bun run test:fast`.
|
||||||
|
<!-- SECTION:NOTES:END -->
|
||||||
|
|
||||||
|
## Final Summary
|
||||||
|
|
||||||
|
<!-- SECTION:FINAL_SUMMARY:BEGIN -->
|
||||||
|
Prepared patch release `v0.5.2` so the explicit SignPath artifact-configuration pin ships on a fresh release tag. Version metadata, committed changelog artifacts, and release notes are aligned with the new patch version.
|
||||||
|
|
||||||
|
Validation: `bun run changelog:lint`, `bun run changelog:check --version 0.5.2`, `bun run typecheck`, `bun run test:fast`.
|
||||||
|
<!-- SECTION:FINAL_SUMMARY:END -->
|
||||||
@@ -0,0 +1,62 @@
|
|||||||
|
---
|
||||||
|
id: TASK-138
|
||||||
|
title: Publish unsigned Windows release artifacts and add local unsigned build script
|
||||||
|
status: Done
|
||||||
|
assignee:
|
||||||
|
- codex
|
||||||
|
created_date: '2026-03-09 00:00'
|
||||||
|
updated_date: '2026-03-09 00:00'
|
||||||
|
labels:
|
||||||
|
- release
|
||||||
|
- windows
|
||||||
|
dependencies: []
|
||||||
|
references:
|
||||||
|
- .github/workflows/release.yml
|
||||||
|
- package.json
|
||||||
|
- src/release-workflow.test.ts
|
||||||
|
priority: high
|
||||||
|
---
|
||||||
|
|
||||||
|
## Description
|
||||||
|
|
||||||
|
<!-- SECTION:DESCRIPTION:BEGIN -->
|
||||||
|
Stop the tag-driven release workflow from depending on SignPath and publish unsigned Windows `.exe` and `.zip` artifacts directly. Add an explicit local `build:win:unsigned` script without changing the existing `build:win` command.
|
||||||
|
<!-- SECTION:DESCRIPTION:END -->
|
||||||
|
|
||||||
|
## Acceptance Criteria
|
||||||
|
<!-- AC:BEGIN -->
|
||||||
|
- [x] #1 Windows release CI builds unsigned artifacts without requiring SignPath secrets.
|
||||||
|
- [x] #2 The Windows release job uploads `release/*.exe` and `release/*.zip` directly as the `windows` artifact.
|
||||||
|
- [x] #3 The repo exposes a local `build:win:unsigned` script for explicit unsigned Windows packaging.
|
||||||
|
- [x] #4 Regression coverage fails if the workflow reintroduces SignPath submission or drops the unsigned script.
|
||||||
|
<!-- AC:END -->
|
||||||
|
|
||||||
|
## Implementation Plan
|
||||||
|
|
||||||
|
<!-- SECTION:PLAN:BEGIN -->
|
||||||
|
1. Update workflow regression tests to assert unsigned Windows release behavior and the new local script.
|
||||||
|
2. Patch `package.json` to add `build:win:unsigned`.
|
||||||
|
3. Patch `.github/workflows/release.yml` to build unsigned Windows artifacts and upload them directly.
|
||||||
|
4. Add the release changelog fragment and run focused verification.
|
||||||
|
<!-- SECTION:PLAN:END -->
|
||||||
|
|
||||||
|
## Implementation Notes
|
||||||
|
|
||||||
|
<!-- SECTION:NOTES:BEGIN -->
|
||||||
|
Removed the Windows SignPath secret validation and submission steps from `.github/workflows/release.yml`. The Windows release job now runs `bun run build:win:unsigned` and uploads `release/*.exe` and `release/*.zip` directly as the `windows` artifact consumed by the release job.
|
||||||
|
|
||||||
|
Added `scripts/build-win-unsigned.mjs` plus the `build:win:unsigned` package script. The wrapper clears Windows code-signing environment variables and disables identity auto-discovery before invoking `electron-builder`, so release CI stays unsigned even if signing credentials are configured elsewhere.
|
||||||
|
|
||||||
|
Updated `src/release-workflow.test.ts` to assert the unsigned workflow contract and added the release changelog fragment in `changes/unsigned-windows-release-builds.md`.
|
||||||
|
<!-- SECTION:NOTES:END -->
|
||||||
|
|
||||||
|
## Final Summary
|
||||||
|
|
||||||
|
<!-- SECTION:FINAL_SUMMARY:BEGIN -->
|
||||||
|
Windows release CI now publishes unsigned artifacts directly and no longer depends on SignPath. Local developers also have an explicit `bun run build:win:unsigned` path for unsigned packaging without changing the existing `build:win` command.
|
||||||
|
|
||||||
|
Verification:
|
||||||
|
- `bun test src/release-workflow.test.ts`
|
||||||
|
- `bun run typecheck`
|
||||||
|
- `node --check scripts/build-win-unsigned.mjs`
|
||||||
|
<!-- SECTION:FINAL_SUMMARY:END -->
|
||||||
@@ -0,0 +1,57 @@
|
|||||||
|
---
|
||||||
|
id: TASK-139
|
||||||
|
title: Cut patch release v0.5.3 for unsigned Windows release builds
|
||||||
|
status: Done
|
||||||
|
assignee:
|
||||||
|
- codex
|
||||||
|
created_date: '2026-03-09 00:00'
|
||||||
|
updated_date: '2026-03-09 00:00'
|
||||||
|
labels:
|
||||||
|
- release
|
||||||
|
- patch
|
||||||
|
dependencies:
|
||||||
|
- TASK-138
|
||||||
|
references:
|
||||||
|
- package.json
|
||||||
|
- CHANGELOG.md
|
||||||
|
- release/release-notes.md
|
||||||
|
priority: high
|
||||||
|
---
|
||||||
|
|
||||||
|
## Description
|
||||||
|
|
||||||
|
<!-- SECTION:DESCRIPTION:BEGIN -->
|
||||||
|
Publish a patch release from the unsigned Windows release-build change by bumping the app version, generating committed changelog artifacts for `v0.5.3`, and pushing the release-prep commit.
|
||||||
|
<!-- SECTION:DESCRIPTION:END -->
|
||||||
|
|
||||||
|
## Acceptance Criteria
|
||||||
|
<!-- AC:BEGIN -->
|
||||||
|
- [x] #1 Repository version metadata is updated to `0.5.3`.
|
||||||
|
- [x] #2 `CHANGELOG.md` and `release/release-notes.md` contain the committed `v0.5.3` section and consumed fragments are removed.
|
||||||
|
- [x] #3 New `v0.5.3` release-prep commit is pushed to `origin/main`.
|
||||||
|
<!-- AC:END -->
|
||||||
|
|
||||||
|
## Implementation Plan
|
||||||
|
|
||||||
|
<!-- SECTION:PLAN:BEGIN -->
|
||||||
|
1. Bump `package.json` from `0.5.2` to `0.5.3`.
|
||||||
|
2. Run `bun run changelog:build` so committed changelog artifacts match the new patch version.
|
||||||
|
3. Run changelog/typecheck/test verification.
|
||||||
|
4. Commit the release-prep change set and push `main`.
|
||||||
|
<!-- SECTION:PLAN:END -->
|
||||||
|
|
||||||
|
## Implementation Notes
|
||||||
|
|
||||||
|
<!-- SECTION:NOTES:BEGIN -->
|
||||||
|
Bumped `package.json` from `0.5.2` to `0.5.3`, ran `bun run changelog:build`, and committed the generated release artifacts. That prepended the `v0.5.3` section to `CHANGELOG.md`, regenerated `release/release-notes.md`, and removed the consumed `changes/unsigned-windows-release-builds.md` fragment.
|
||||||
|
|
||||||
|
Verification before push: `bun run changelog:lint`, `bun run changelog:check --version 0.5.3`, `bun run typecheck`, and `bun run test:fast`.
|
||||||
|
<!-- SECTION:NOTES:END -->
|
||||||
|
|
||||||
|
## Final Summary
|
||||||
|
|
||||||
|
<!-- SECTION:FINAL_SUMMARY:BEGIN -->
|
||||||
|
Prepared patch release `v0.5.3` so the unsigned Windows release-build change is captured in committed release metadata on `main`. Version metadata, changelog output, and release notes are aligned with the new patch version.
|
||||||
|
|
||||||
|
Validation: `bun run changelog:lint`, `bun run changelog:check --version 0.5.3`, `bun run typecheck`, `bun run test:fast`.
|
||||||
|
<!-- SECTION:FINAL_SUMMARY:END -->
|
||||||
@@ -0,0 +1,38 @@
|
|||||||
|
---
|
||||||
|
id: TASK-140
|
||||||
|
title: Fix guessit title parsing for character dictionary sync
|
||||||
|
status: Done
|
||||||
|
assignee: []
|
||||||
|
created_date: '2026-03-09 00:00'
|
||||||
|
updated_date: '2026-03-09 00:25'
|
||||||
|
labels:
|
||||||
|
- dictionary
|
||||||
|
- anilist
|
||||||
|
- bug
|
||||||
|
- guessit
|
||||||
|
dependencies: []
|
||||||
|
references:
|
||||||
|
- /home/sudacode/projects/japanese/SubMiner/src/core/services/anilist/anilist-updater.ts
|
||||||
|
- /home/sudacode/projects/japanese/SubMiner/src/core/services/anilist/anilist-updater.test.ts
|
||||||
|
priority: high
|
||||||
|
---
|
||||||
|
|
||||||
|
## Description
|
||||||
|
|
||||||
|
<!-- SECTION:DESCRIPTION:BEGIN -->
|
||||||
|
Fix AniList character dictionary auto-sync for filenames where `guessit` misparses the full path and our title extraction keeps only the first array segment, causing AniList resolution to match the wrong anime and abort merged dictionary refresh.
|
||||||
|
<!-- SECTION:DESCRIPTION:END -->
|
||||||
|
|
||||||
|
## Acceptance Criteria
|
||||||
|
<!-- AC:BEGIN -->
|
||||||
|
- [x] #1 AniList media guessing passes basename-only targets to `guessit` so parent folder names do not corrupt series title detection.
|
||||||
|
- [x] #2 Guessit title arrays are combined into one usable title instead of truncating to the first segment.
|
||||||
|
- [x] #3 Regression coverage includes the Bunny Girl Senpai filename shape that previously resolved to the wrong AniList entry.
|
||||||
|
- [x] #4 Verification confirms the targeted AniList guessing tests pass.
|
||||||
|
<!-- AC:END -->
|
||||||
|
|
||||||
|
## Implementation Notes
|
||||||
|
|
||||||
|
<!-- SECTION:NOTES:BEGIN -->
|
||||||
|
Root repro: `guessit` parsed the Bunny Girl Senpai full path as `title: ["Rascal", "Does-not-Dream-of-Bunny-Girl-Senapi"]`, and our `firstString` helper kept only `Rascal`, which resolved to AniList 3490 (`rayca`) and produced zero character results. Fixed by sending basename-only input to `guessit` and joining multi-part guessit title arrays.
|
||||||
|
<!-- SECTION:NOTES:END -->
|
||||||
@@ -0,0 +1,36 @@
|
|||||||
|
---
|
||||||
|
id: TASK-141
|
||||||
|
title: Refresh current subtitle after character dictionary sync completes
|
||||||
|
status: Done
|
||||||
|
assignee: []
|
||||||
|
created_date: '2026-03-09 00:00'
|
||||||
|
updated_date: '2026-03-09 00:55'
|
||||||
|
labels:
|
||||||
|
- dictionary
|
||||||
|
- overlay
|
||||||
|
- bug
|
||||||
|
dependencies: []
|
||||||
|
references:
|
||||||
|
- /home/sudacode/projects/japanese/SubMiner/src/main/runtime/character-dictionary-auto-sync.ts
|
||||||
|
- /home/sudacode/projects/japanese/SubMiner/src/main.ts
|
||||||
|
priority: high
|
||||||
|
---
|
||||||
|
|
||||||
|
## Description
|
||||||
|
|
||||||
|
<!-- SECTION:DESCRIPTION:BEGIN -->
|
||||||
|
When character dictionary auto-sync finishes after startup tokenization, invalidate cached subtitle tokenization and refresh the current subtitle so character-name highlighting catches up without waiting for the next subtitle line.
|
||||||
|
<!-- SECTION:DESCRIPTION:END -->
|
||||||
|
|
||||||
|
## Acceptance Criteria
|
||||||
|
<!-- AC:BEGIN -->
|
||||||
|
- [x] #1 Successful character dictionary sync exposes a completion hook for main runtime follow-up.
|
||||||
|
- [x] #2 Main runtime clears Yomitan parser caches and refreshes the current subtitle after sync completion.
|
||||||
|
- [x] #3 Regression coverage verifies the sync completion callback fires on successful sync.
|
||||||
|
<!-- AC:END -->
|
||||||
|
|
||||||
|
## Implementation Notes
|
||||||
|
|
||||||
|
<!-- SECTION:NOTES:BEGIN -->
|
||||||
|
Observed on Bunny Girl Senpai startup: autoplay/tokenization became ready around 8s, but snapshot/import/state write completed roughly 31s after launch, leaving the current subtitle tokenized without the newly imported character dictionary. Fixed by adding an auto-sync completion hook that clears parser caches and refreshes the current subtitle.
|
||||||
|
<!-- SECTION:NOTES:END -->
|
||||||
@@ -0,0 +1,43 @@
|
|||||||
|
---
|
||||||
|
id: TASK-142
|
||||||
|
title: Show character dictionary auto-sync progress on OSD
|
||||||
|
status: Done
|
||||||
|
assignee: []
|
||||||
|
created_date: '2026-03-09 01:10'
|
||||||
|
updated_date: '2026-03-09 01:10'
|
||||||
|
labels:
|
||||||
|
- dictionary
|
||||||
|
- overlay
|
||||||
|
- ux
|
||||||
|
dependencies: []
|
||||||
|
references:
|
||||||
|
- /home/sudacode/projects/japanese/SubMiner/src/main/runtime/character-dictionary-auto-sync.ts
|
||||||
|
- /home/sudacode/projects/japanese/SubMiner/src/main/runtime/character-dictionary-auto-sync-notifications.ts
|
||||||
|
- /home/sudacode/projects/japanese/SubMiner/src/main.ts
|
||||||
|
priority: medium
|
||||||
|
---
|
||||||
|
|
||||||
|
## Description
|
||||||
|
|
||||||
|
<!-- SECTION:DESCRIPTION:BEGIN -->
|
||||||
|
|
||||||
|
When character dictionary auto-sync runs for a newly opened anime, surface progress so users know why character-name lookup/highlighting is temporarily unavailable via the mpv OSD without desktop notification popups.
|
||||||
|
|
||||||
|
<!-- SECTION:DESCRIPTION:END -->
|
||||||
|
|
||||||
|
## Acceptance Criteria
|
||||||
|
|
||||||
|
<!-- AC:BEGIN -->
|
||||||
|
|
||||||
|
- [x] #1 Character dictionary auto-sync emits progress events for syncing, importing, ready, and failure states.
|
||||||
|
- [x] #2 Main runtime routes those progress events through OSD notifications without desktop notifications.
|
||||||
|
- [x] #3 Regression coverage verifies progress events and notification routing behavior.
|
||||||
|
<!-- AC:END -->
|
||||||
|
|
||||||
|
## Implementation Notes
|
||||||
|
|
||||||
|
<!-- SECTION:NOTES:BEGIN -->
|
||||||
|
|
||||||
|
OSD now shows auto-sync phase changes while the dictionary updates. Desktop notifications were removed for this path to avoid startup popup spam.
|
||||||
|
|
||||||
|
<!-- SECTION:NOTES:END -->
|
||||||
@@ -0,0 +1,43 @@
|
|||||||
|
---
|
||||||
|
id: TASK-143
|
||||||
|
title: Keep character dictionary auto-sync non-blocking during startup
|
||||||
|
status: Done
|
||||||
|
assignee: []
|
||||||
|
created_date: '2026-03-09 01:45'
|
||||||
|
updated_date: '2026-03-09 01:45'
|
||||||
|
labels:
|
||||||
|
- dictionary
|
||||||
|
- startup
|
||||||
|
- performance
|
||||||
|
dependencies: []
|
||||||
|
references:
|
||||||
|
- /home/sudacode/projects/japanese/SubMiner/src/main.ts
|
||||||
|
- /home/sudacode/projects/japanese/SubMiner/src/main/runtime/character-dictionary-auto-sync.ts
|
||||||
|
- /home/sudacode/projects/japanese/SubMiner/src/main/runtime/current-media-tokenization-gate.ts
|
||||||
|
priority: high
|
||||||
|
---
|
||||||
|
|
||||||
|
## Description
|
||||||
|
|
||||||
|
<!-- SECTION:DESCRIPTION:BEGIN -->
|
||||||
|
|
||||||
|
Keep character dictionary auto-sync running in parallel during startup without delaying playback. Only tokenization readiness should gate playback; character dictionary import/settings updates should wait until tokenization is already ready and then refresh annotations afterward.
|
||||||
|
|
||||||
|
<!-- SECTION:DESCRIPTION:END -->
|
||||||
|
|
||||||
|
## Acceptance Criteria
|
||||||
|
|
||||||
|
<!-- AC:BEGIN -->
|
||||||
|
|
||||||
|
- [x] #1 Character dictionary snapshot/build work can run immediately during startup.
|
||||||
|
- [x] #2 Yomitan dictionary mutation work waits until current-media tokenization is ready.
|
||||||
|
- [x] #3 Regression coverage verifies auto-sync builds before the gate and only mutates Yomitan after the gate resolves.
|
||||||
|
<!-- AC:END -->
|
||||||
|
|
||||||
|
## Implementation Notes
|
||||||
|
|
||||||
|
<!-- SECTION:NOTES:BEGIN -->
|
||||||
|
|
||||||
|
Added a small current-media tokenization gate in main runtime. Media changes reset the gate, the first tokenization-ready event marks it ready, and auto-sync now waits on that gate only before Yomitan dictionary inspection/import/settings updates. Snapshot generation and merged ZIP build still run immediately in parallel.
|
||||||
|
|
||||||
|
<!-- SECTION:NOTES:END -->
|
||||||
@@ -0,0 +1,44 @@
|
|||||||
|
---
|
||||||
|
id: TASK-144
|
||||||
|
title: Sequence startup OSD notifications for tokenization, annotations, and character dictionary sync
|
||||||
|
status: Done
|
||||||
|
assignee: []
|
||||||
|
created_date: '2026-03-09 10:40'
|
||||||
|
updated_date: '2026-03-09 10:40'
|
||||||
|
labels:
|
||||||
|
- startup
|
||||||
|
- overlay
|
||||||
|
- ux
|
||||||
|
dependencies: []
|
||||||
|
references:
|
||||||
|
- /home/sudacode/projects/japanese/SubMiner/src/main.ts
|
||||||
|
- /home/sudacode/projects/japanese/SubMiner/src/main/runtime/startup-osd-sequencer.ts
|
||||||
|
- /home/sudacode/projects/japanese/SubMiner/src/main/runtime/subtitle-tokenization-main-deps.ts
|
||||||
|
- /home/sudacode/projects/japanese/SubMiner/src/main/runtime/character-dictionary-auto-sync-notifications.ts
|
||||||
|
priority: medium
|
||||||
|
---
|
||||||
|
|
||||||
|
## Description
|
||||||
|
|
||||||
|
<!-- SECTION:DESCRIPTION:BEGIN -->
|
||||||
|
|
||||||
|
Keep startup OSD progress ordered. While tokenization is still pending, only show the tokenization loading message. After tokenization becomes ready, show annotation loading if annotation warmup still remains. Only surface character dictionary auto-sync progress after annotation loading clears, and only if the dictionary work is still active.
|
||||||
|
|
||||||
|
<!-- SECTION:DESCRIPTION:END -->
|
||||||
|
|
||||||
|
## Acceptance Criteria
|
||||||
|
|
||||||
|
<!-- AC:BEGIN -->
|
||||||
|
|
||||||
|
- [x] #1 Character dictionary progress stays hidden while tokenization startup loading is still active.
|
||||||
|
- [x] #2 Annotation loading OSD appears after tokenization readiness and before any later character dictionary progress.
|
||||||
|
- [x] #3 Regression coverage verifies buffered dictionary progress/failure ordering during startup.
|
||||||
|
<!-- AC:END -->
|
||||||
|
|
||||||
|
## Implementation Notes
|
||||||
|
|
||||||
|
<!-- SECTION:NOTES:BEGIN -->
|
||||||
|
|
||||||
|
Added a small startup OSD sequencer in main runtime. Annotation warmup OSD now flows through that sequencer, and character dictionary sync notifications buffer until tokenization plus annotation loading clear. Buffered `ready` updates are dropped if dictionary progress finished before it ever became visible, while buffered failures still surface after annotation loading completes.
|
||||||
|
|
||||||
|
<!-- SECTION:NOTES:END -->
|
||||||
@@ -0,0 +1,43 @@
|
|||||||
|
---
|
||||||
|
id: TASK-145
|
||||||
|
title: Show character dictionary build progress on startup OSD before import
|
||||||
|
status: Done
|
||||||
|
assignee: []
|
||||||
|
created_date: '2026-03-09 11:20'
|
||||||
|
updated_date: '2026-03-09 11:20'
|
||||||
|
labels:
|
||||||
|
- startup
|
||||||
|
- dictionary
|
||||||
|
- ux
|
||||||
|
dependencies: []
|
||||||
|
references:
|
||||||
|
- /home/sudacode/projects/japanese/SubMiner/src/main/runtime/character-dictionary-auto-sync.ts
|
||||||
|
- /home/sudacode/projects/japanese/SubMiner/src/main/runtime/startup-osd-sequencer.ts
|
||||||
|
- /home/sudacode/projects/japanese/SubMiner/src/main/runtime/character-dictionary-auto-sync.test.ts
|
||||||
|
priority: medium
|
||||||
|
---
|
||||||
|
|
||||||
|
## Description
|
||||||
|
|
||||||
|
<!-- SECTION:DESCRIPTION:BEGIN -->
|
||||||
|
|
||||||
|
Surface an explicit character-dictionary build phase on startup OSD so there is visible progress between subtitle annotation loading and the later import/upload step when merged dictionary generation is still running.
|
||||||
|
|
||||||
|
<!-- SECTION:DESCRIPTION:END -->
|
||||||
|
|
||||||
|
## Acceptance Criteria
|
||||||
|
|
||||||
|
<!-- AC:BEGIN -->
|
||||||
|
|
||||||
|
- [x] #1 Auto-sync emits a dedicated in-flight status while merged dictionary generation is running.
|
||||||
|
- [x] #2 Startup OSD sequencing treats that build phase as progress and can surface it after annotation loading clears.
|
||||||
|
- [x] #3 Regression coverage verifies the build phase is emitted before import begins.
|
||||||
|
<!-- AC:END -->
|
||||||
|
|
||||||
|
## Implementation Notes
|
||||||
|
|
||||||
|
<!-- SECTION:NOTES:BEGIN -->
|
||||||
|
|
||||||
|
Added a `building` progress phase before `buildMergedDictionary(...)` and included it in the startup OSD sequencer's buffered progress set. This gives startup a visible dictionary-progress step even when snapshot checking/generation finished too early to still be relevant by the time annotation loading completes.
|
||||||
|
|
||||||
|
<!-- SECTION:NOTES:END -->
|
||||||
@@ -0,0 +1,46 @@
|
|||||||
|
---
|
||||||
|
id: TASK-147
|
||||||
|
title: Show checking and generation OSD for character dictionary auto-sync
|
||||||
|
status: Done
|
||||||
|
assignee: []
|
||||||
|
created_date: '2026-03-09 11:20'
|
||||||
|
updated_date: '2026-03-09 11:20'
|
||||||
|
labels:
|
||||||
|
- dictionary
|
||||||
|
- overlay
|
||||||
|
- ux
|
||||||
|
dependencies: []
|
||||||
|
references:
|
||||||
|
- /home/sudacode/projects/japanese/SubMiner/src/main.ts
|
||||||
|
- /home/sudacode/projects/japanese/SubMiner/src/main/runtime/character-dictionary-auto-sync.ts
|
||||||
|
- /home/sudacode/projects/japanese/SubMiner/src/main/runtime/startup-osd-sequencer.ts
|
||||||
|
- /home/sudacode/projects/japanese/SubMiner/src/main/character-dictionary-runtime.ts
|
||||||
|
priority: medium
|
||||||
|
---
|
||||||
|
|
||||||
|
## Description
|
||||||
|
|
||||||
|
<!-- SECTION:DESCRIPTION:BEGIN -->
|
||||||
|
|
||||||
|
Surface an immediate startup OSD that the character dictionary is being checked, and show a distinct generating message only when the current AniList media actually needs a fresh snapshot build instead of reusing a cached one.
|
||||||
|
|
||||||
|
<!-- SECTION:DESCRIPTION:END -->
|
||||||
|
|
||||||
|
## Acceptance Criteria
|
||||||
|
|
||||||
|
<!-- AC:BEGIN -->
|
||||||
|
|
||||||
|
- [x] #1 Auto-sync emits a `checking` progress event before snapshot resolution completes.
|
||||||
|
- [x] #2 Auto-sync emits `generating` only for snapshot cache misses and keeps `updating`/`importing` as later phases.
|
||||||
|
- [x] #3 Startup OSD sequencing still prioritizes tokenization then annotation loading before buffered dictionary progress.
|
||||||
|
<!-- AC:END -->
|
||||||
|
|
||||||
|
## Final Summary
|
||||||
|
|
||||||
|
<!-- SECTION:FINAL_SUMMARY:BEGIN -->
|
||||||
|
|
||||||
|
Character dictionary auto-sync now emits `Checking character dictionary...` as soon as the AniList media is resolved, then emits `Generating character dictionary...` only when the snapshot layer misses and a real rebuild begins. Cached snapshots skip the generating phase and continue straight into the later update/import flow.
|
||||||
|
|
||||||
|
Wired those progress callbacks through the character-dictionary runtime boundary, updated the startup OSD sequencer to treat checking/generating as dictionary-progress phases with the same tokenization and annotation precedence, and added regression coverage for cache-hit vs cache-miss behavior plus buffered startup ordering.
|
||||||
|
|
||||||
|
<!-- SECTION:FINAL_SUMMARY:END -->
|
||||||
@@ -0,0 +1,66 @@
|
|||||||
|
---
|
||||||
|
id: TASK-146
|
||||||
|
title: Forward overlay Tab to mpv for AniSkip
|
||||||
|
status: Done
|
||||||
|
assignee:
|
||||||
|
- codex
|
||||||
|
created_date: '2026-03-09 00:00'
|
||||||
|
updated_date: '2026-03-09 00:00'
|
||||||
|
labels:
|
||||||
|
- bug
|
||||||
|
- overlay
|
||||||
|
- aniskip
|
||||||
|
- linux
|
||||||
|
dependencies: []
|
||||||
|
---
|
||||||
|
|
||||||
|
## Description
|
||||||
|
|
||||||
|
<!-- SECTION:DESCRIPTION:BEGIN -->
|
||||||
|
|
||||||
|
Fix visible-overlay keyboard handling so bare `Tab` is forwarded to mpv instead of being consumed by Electron focus navigation. This restores the default AniSkip `TAB` binding while the overlay has focus, especially on Linux.
|
||||||
|
|
||||||
|
<!-- SECTION:DESCRIPTION:END -->
|
||||||
|
|
||||||
|
## Acceptance Criteria
|
||||||
|
|
||||||
|
<!-- AC:BEGIN -->
|
||||||
|
|
||||||
|
- [x] #1 Visible overlay forwards bare `Tab` to mpv as `keypress TAB`.
|
||||||
|
- [x] #2 Modal overlays keep their existing local `Tab` behavior.
|
||||||
|
- [x] #3 Automated regression coverage exists for the input handler and overlay factory wiring.
|
||||||
|
<!-- AC:END -->
|
||||||
|
|
||||||
|
## Implementation Plan
|
||||||
|
|
||||||
|
<!-- SECTION:PLAN:BEGIN -->
|
||||||
|
|
||||||
|
1. Add a failing regression around visible-overlay `before-input-event` handling for bare `Tab`.
|
||||||
|
2. Add/extend overlay factory tests so the new mpv-forward callback is wired through runtime construction.
|
||||||
|
3. Patch overlay input handling to intercept visible-overlay `Tab` and send mpv `keypress TAB`.
|
||||||
|
4. Run focused overlay tests, typecheck, and changelog validation.
|
||||||
|
<!-- SECTION:PLAN:END -->
|
||||||
|
|
||||||
|
## Implementation Notes
|
||||||
|
|
||||||
|
<!-- SECTION:NOTES:BEGIN -->
|
||||||
|
|
||||||
|
Extracted visible-overlay input handling into `src/core/services/overlay-window-input.ts` so the `Tab` forwarding decision can be unit tested without loading Electron window primitives.
|
||||||
|
|
||||||
|
Visible overlay `before-input-event` now intercepts bare `Tab`, prevents the browser default, and forwards mpv `keypress TAB` through the existing mpv runtime command path. Modal overlays remain unchanged.
|
||||||
|
|
||||||
|
Verification:
|
||||||
|
|
||||||
|
- `bun test src/core/services/overlay-window.test.ts src/main/runtime/overlay-window-factory.test.ts src/main/runtime/overlay-window-factory-main-deps.test.ts src/main/runtime/overlay-window-runtime-handlers.test.ts`
|
||||||
|
- `bun x tsc --noEmit`
|
||||||
|
<!-- SECTION:NOTES:END -->
|
||||||
|
|
||||||
|
## Final Summary
|
||||||
|
|
||||||
|
<!-- SECTION:FINAL_SUMMARY:BEGIN -->
|
||||||
|
|
||||||
|
Visible overlay focus no longer blocks the default AniSkip `Tab` binding. Bare `Tab` is now forwarded straight to mpv while the visible overlay is active, and modal overlays still retain their own normal focus behavior.
|
||||||
|
|
||||||
|
Added regression coverage for both the input-routing decision and the runtime plumbing that carries the new mpv forwarder into overlay window creation.
|
||||||
|
|
||||||
|
<!-- SECTION:FINAL_SUMMARY:END -->
|
||||||
@@ -0,0 +1,45 @@
|
|||||||
|
---
|
||||||
|
id: TASK-148
|
||||||
|
title: Fix Windows plugin env binary override resolution
|
||||||
|
status: Done
|
||||||
|
assignee:
|
||||||
|
- codex
|
||||||
|
created_date: '2026-03-09 00:00'
|
||||||
|
updated_date: '2026-03-09 00:00'
|
||||||
|
labels:
|
||||||
|
- windows
|
||||||
|
- plugin
|
||||||
|
- regression
|
||||||
|
dependencies: []
|
||||||
|
priority: medium
|
||||||
|
---
|
||||||
|
|
||||||
|
## Description
|
||||||
|
|
||||||
|
<!-- SECTION:DESCRIPTION:BEGIN -->
|
||||||
|
|
||||||
|
Fix the mpv plugin's Windows binary override lookup so `SUBMINER_BINARY_PATH` still resolves when `SUBMINER_APPIMAGE_PATH` is unset. The current Lua resolver builds an array with a leading `nil`, which causes `ipairs` iteration to stop before the later Windows override candidate.
|
||||||
|
|
||||||
|
<!-- SECTION:DESCRIPTION:END -->
|
||||||
|
|
||||||
|
## Acceptance Criteria
|
||||||
|
|
||||||
|
<!-- AC:BEGIN -->
|
||||||
|
|
||||||
|
- [x] #1 `scripts/test-plugin-binary-windows.lua` passes the env override regression that expects `.exe` suffix resolution from `SUBMINER_BINARY_PATH`.
|
||||||
|
- [x] #2 Existing plugin start/binary test gate stays green after the fix.
|
||||||
|
|
||||||
|
<!-- AC:END -->
|
||||||
|
|
||||||
|
## Final Summary
|
||||||
|
|
||||||
|
<!-- SECTION:FINAL_SUMMARY:BEGIN -->
|
||||||
|
|
||||||
|
Updated `plugin/subminer/binary.lua` so env override lookup checks `SUBMINER_APPIMAGE_PATH` and `SUBMINER_BINARY_PATH` sequentially instead of via a Lua array literal that truncates at the first `nil`. This restores Windows `.exe` suffix resolution for `SUBMINER_BINARY_PATH` when the AppImage env var is unset.
|
||||||
|
|
||||||
|
Verification:
|
||||||
|
|
||||||
|
- `lua scripts/test-plugin-binary-windows.lua`
|
||||||
|
- `bun run test:plugin:src`
|
||||||
|
|
||||||
|
<!-- SECTION:FINAL_SUMMARY:END -->
|
||||||
@@ -0,0 +1,71 @@
|
|||||||
|
---
|
||||||
|
id: TASK-149
|
||||||
|
title: Cut patch release v0.5.5 for character dictionary updates and release guarding
|
||||||
|
status: Done
|
||||||
|
assignee:
|
||||||
|
- codex
|
||||||
|
created_date: '2026-03-09 01:10'
|
||||||
|
updated_date: '2026-03-09 01:14'
|
||||||
|
labels:
|
||||||
|
- release
|
||||||
|
- patch
|
||||||
|
dependencies:
|
||||||
|
- TASK-140
|
||||||
|
- TASK-141
|
||||||
|
- TASK-142
|
||||||
|
- TASK-143
|
||||||
|
- TASK-144
|
||||||
|
- TASK-145
|
||||||
|
- TASK-146
|
||||||
|
- TASK-148
|
||||||
|
references:
|
||||||
|
- package.json
|
||||||
|
- CHANGELOG.md
|
||||||
|
- scripts/build-changelog.ts
|
||||||
|
- scripts/build-changelog.test.ts
|
||||||
|
- docs/RELEASING.md
|
||||||
|
priority: high
|
||||||
|
---
|
||||||
|
|
||||||
|
## Description
|
||||||
|
|
||||||
|
<!-- SECTION:DESCRIPTION:BEGIN -->
|
||||||
|
Prepare and publish patch release `v0.5.5` after the failed `v0.5.4` tag by aligning package version metadata, generating committed changelog output from the pending release fragments, and hardening release validation so a future tag cannot ship with a mismatched `package.json` version.
|
||||||
|
<!-- SECTION:DESCRIPTION:END -->
|
||||||
|
|
||||||
|
## Acceptance Criteria
|
||||||
|
<!-- AC:BEGIN -->
|
||||||
|
- [x] #1 Repository version metadata is updated to `0.5.5`.
|
||||||
|
- [x] #2 `CHANGELOG.md` contains the committed `v0.5.5` section and the consumed fragments are removed.
|
||||||
|
- [x] #3 Release validation rejects a requested release version when it differs from `package.json`.
|
||||||
|
- [x] #4 Release docs capture the required version/changelog prep before tagging.
|
||||||
|
- [x] #5 New `v0.5.5` release-prep commit and tag are pushed to `origin/main`.
|
||||||
|
<!-- AC:END -->
|
||||||
|
|
||||||
|
## Implementation Plan
|
||||||
|
|
||||||
|
<!-- SECTION:PLAN:BEGIN -->
|
||||||
|
1. Add a regression test for tagged-release/package version mismatch.
|
||||||
|
2. Update changelog validation to reject mismatched explicit release versions.
|
||||||
|
3. Bump `package.json`, generate committed `v0.5.5` changelog output, and remove consumed fragments.
|
||||||
|
4. Add a short `docs/RELEASING.md` checklist for the prep flow.
|
||||||
|
5. Run release verification, commit, tag, and push.
|
||||||
|
<!-- SECTION:PLAN:END -->
|
||||||
|
|
||||||
|
## Implementation Notes
|
||||||
|
|
||||||
|
<!-- SECTION:NOTES:BEGIN -->
|
||||||
|
Added a regression test in `scripts/build-changelog.test.ts` that proves `changelog:check --version ...` rejects tag/package mismatches. Updated `scripts/build-changelog.ts` so tagged release validation now compares the explicit requested version against `package.json` before looking for pending fragments or the committed changelog section.
|
||||||
|
|
||||||
|
Bumped `package.json` from `0.5.3` to `0.5.5`, ran `bun run changelog:build --version 0.5.5 --date 2026-03-09`, and committed the generated `CHANGELOG.md` output while removing the consumed task fragments. Added `docs/RELEASING.md` with the required release-prep checklist so version bump + changelog generation happen before tagging.
|
||||||
|
|
||||||
|
Verification: `bun run changelog:lint`, `bun run changelog:check --version 0.5.5`, `bun run typecheck`, `bun run test:fast`, and `bun test scripts/build-changelog.test.ts src/release-workflow.test.ts`. `bun run format:check` still reports many unrelated pre-existing repo-wide Prettier warnings, so touched files were checked/formatted separately with `bunx prettier`.
|
||||||
|
<!-- SECTION:NOTES:END -->
|
||||||
|
|
||||||
|
## Final Summary
|
||||||
|
|
||||||
|
<!-- SECTION:FINAL_SUMMARY:BEGIN -->
|
||||||
|
Prepared patch release `v0.5.5` after the failed `v0.5.4` release attempt. Release metadata now matches the upcoming tag, the pending character-dictionary/overlay/plugin fragments are committed into `CHANGELOG.md`, and release validation now blocks future tag/package mismatches before publish.
|
||||||
|
|
||||||
|
Docs now include a short release checklist in `docs/RELEASING.md`. Validation passed for changelog lint/check, typecheck, targeted workflow tests, and the full fast test suite. Repo-wide Prettier remains noisy from unrelated existing files, but touched release files were formatted and verified.
|
||||||
|
<!-- SECTION:FINAL_SUMMARY:END -->
|
||||||
@@ -1,4 +0,0 @@
|
|||||||
type: fixed
|
|
||||||
area: launcher
|
|
||||||
|
|
||||||
- Hardened YouTube AI subtitle fixing so fenced SRT output and text-only one-cue-per-block responses can still be applied without losing original cue timing.
|
|
||||||
@@ -1,4 +0,0 @@
|
|||||||
type: fixed
|
|
||||||
area: launcher
|
|
||||||
|
|
||||||
- Skipped AniSkip lookup during URL playback and YouTube subtitle-preload playback, limiting AniSkip to local file targets where it can actually resolve anime metadata.
|
|
||||||
@@ -1,5 +0,0 @@
|
|||||||
type: fixed
|
|
||||||
area: launcher
|
|
||||||
|
|
||||||
- Keep the background SubMiner process running after a launcher-managed mpv session exits so the next mpv instance can reconnect without restarting the app.
|
|
||||||
- Reuse prior tokenization readiness after the background app is already warm so reopening a video does not pause again waiting for duplicate warmup completion.
|
|
||||||
@@ -1,4 +0,0 @@
|
|||||||
type: fixed
|
|
||||||
area: windows
|
|
||||||
|
|
||||||
- Acquire the app single-instance lock earlier so Windows overlay/video launches reuse the running background SubMiner process instead of booting a second full app and repeating startup warmups.
|
|
||||||
@@ -1,4 +0,0 @@
|
|||||||
type: changed
|
|
||||||
area: launcher
|
|
||||||
|
|
||||||
- Removed the YouTube subtitle generation mode switch so YouTube playback always preloads subtitles before mpv starts.
|
|
||||||
@@ -2,9 +2,10 @@
|
|||||||
* SubMiner Example Configuration File
|
* SubMiner Example Configuration File
|
||||||
*
|
*
|
||||||
* This file is auto-generated from src/config/definitions.ts.
|
* This file is auto-generated from src/config/definitions.ts.
|
||||||
* Copy to $XDG_CONFIG_HOME/SubMiner/config.jsonc (or ~/.config/SubMiner/config.jsonc) and edit as needed.
|
* Copy to %APPDATA%/SubMiner/config.jsonc on Windows, or $XDG_CONFIG_HOME/SubMiner/config.jsonc (or ~/.config/SubMiner/config.jsonc) on Linux/macOS.
|
||||||
*/
|
*/
|
||||||
{
|
{
|
||||||
|
|
||||||
// ==========================================
|
// ==========================================
|
||||||
// Overlay Auto-Start
|
// Overlay Auto-Start
|
||||||
// When overlay connects to mpv, automatically show overlay and hide mpv subtitles.
|
// When overlay connects to mpv, automatically show overlay and hide mpv subtitles.
|
||||||
@@ -17,7 +18,7 @@
|
|||||||
// ==========================================
|
// ==========================================
|
||||||
"texthooker": {
|
"texthooker": {
|
||||||
"launchAtStartup": true, // Launch texthooker server automatically when SubMiner starts. Values: true | false
|
"launchAtStartup": true, // Launch texthooker server automatically when SubMiner starts. Values: true | false
|
||||||
"openBrowser": true, // Open browser setting. Values: true | false
|
"openBrowser": true // Open browser setting. Values: true | false
|
||||||
}, // Configure texthooker startup launch and browser opening behavior.
|
}, // Configure texthooker startup launch and browser opening behavior.
|
||||||
|
|
||||||
// ==========================================
|
// ==========================================
|
||||||
@@ -27,7 +28,7 @@
|
|||||||
// ==========================================
|
// ==========================================
|
||||||
"websocket": {
|
"websocket": {
|
||||||
"enabled": "auto", // Built-in subtitle websocket server mode. Values: auto | true | false
|
"enabled": "auto", // Built-in subtitle websocket server mode. Values: auto | true | false
|
||||||
"port": 6677, // Built-in subtitle websocket server port.
|
"port": 6677 // Built-in subtitle websocket server port.
|
||||||
}, // Built-in WebSocket server broadcasts subtitle text to connected clients.
|
}, // Built-in WebSocket server broadcasts subtitle text to connected clients.
|
||||||
|
|
||||||
// ==========================================
|
// ==========================================
|
||||||
@@ -37,7 +38,7 @@
|
|||||||
// ==========================================
|
// ==========================================
|
||||||
"annotationWebsocket": {
|
"annotationWebsocket": {
|
||||||
"enabled": true, // Annotated subtitle websocket server enabled state. Values: true | false
|
"enabled": true, // Annotated subtitle websocket server enabled state. Values: true | false
|
||||||
"port": 6678, // Annotated subtitle websocket server port.
|
"port": 6678 // Annotated subtitle websocket server port.
|
||||||
}, // Dedicated annotated subtitle websocket for bundled texthooker and token-aware clients.
|
}, // Dedicated annotated subtitle websocket for bundled texthooker and token-aware clients.
|
||||||
|
|
||||||
// ==========================================
|
// ==========================================
|
||||||
@@ -46,7 +47,7 @@
|
|||||||
// Set to debug for full runtime diagnostics.
|
// Set to debug for full runtime diagnostics.
|
||||||
// ==========================================
|
// ==========================================
|
||||||
"logging": {
|
"logging": {
|
||||||
"level": "info", // Minimum log level for runtime logging. Values: debug | info | warn | error
|
"level": "info" // Minimum log level for runtime logging. Values: debug | info | warn | error
|
||||||
}, // Controls logging verbosity.
|
}, // Controls logging verbosity.
|
||||||
|
|
||||||
// ==========================================
|
// ==========================================
|
||||||
@@ -60,7 +61,7 @@
|
|||||||
"mecab": true, // Warm up MeCab tokenizer at startup. Values: true | false
|
"mecab": true, // Warm up MeCab tokenizer at startup. Values: true | false
|
||||||
"yomitanExtension": true, // Warm up Yomitan extension at startup. Values: true | false
|
"yomitanExtension": true, // Warm up Yomitan extension at startup. Values: true | false
|
||||||
"subtitleDictionaries": true, // Warm up subtitle dictionaries at startup. Values: true | false
|
"subtitleDictionaries": true, // Warm up subtitle dictionaries at startup. Values: true | false
|
||||||
"jellyfinRemoteSession": true, // Warm up Jellyfin remote session at startup. Values: true | false
|
"jellyfinRemoteSession": true // Warm up Jellyfin remote session at startup. Values: true | false
|
||||||
}, // Background warmup controls for MeCab, Yomitan, dictionaries, and Jellyfin session.
|
}, // Background warmup controls for MeCab, Yomitan, dictionaries, and Jellyfin session.
|
||||||
|
|
||||||
// ==========================================
|
// ==========================================
|
||||||
@@ -81,7 +82,7 @@
|
|||||||
"toggleSecondarySub": "CommandOrControl+Shift+V", // Toggle secondary sub setting.
|
"toggleSecondarySub": "CommandOrControl+Shift+V", // Toggle secondary sub setting.
|
||||||
"markAudioCard": "CommandOrControl+Shift+A", // Mark audio card setting.
|
"markAudioCard": "CommandOrControl+Shift+A", // Mark audio card setting.
|
||||||
"openRuntimeOptions": "CommandOrControl+Shift+O", // Open runtime options setting.
|
"openRuntimeOptions": "CommandOrControl+Shift+O", // Open runtime options setting.
|
||||||
"openJimaku": "Ctrl+Shift+J", // Open jimaku setting.
|
"openJimaku": "Ctrl+Shift+J" // Open jimaku setting.
|
||||||
}, // Overlay keyboard shortcuts. Set a shortcut to null to disable.
|
}, // Overlay keyboard shortcuts. Set a shortcut to null to disable.
|
||||||
|
|
||||||
// ==========================================
|
// ==========================================
|
||||||
@@ -101,7 +102,7 @@
|
|||||||
"secondarySub": {
|
"secondarySub": {
|
||||||
"secondarySubLanguages": [], // Secondary sub languages setting.
|
"secondarySubLanguages": [], // Secondary sub languages setting.
|
||||||
"autoLoadSecondarySub": false, // Auto load secondary sub setting. Values: true | false
|
"autoLoadSecondarySub": false, // Auto load secondary sub setting. Values: true | false
|
||||||
"defaultMode": "hover", // Default mode setting.
|
"defaultMode": "hover" // Default mode setting.
|
||||||
}, // Dual subtitle track options.
|
}, // Dual subtitle track options.
|
||||||
|
|
||||||
// ==========================================
|
// ==========================================
|
||||||
@@ -113,7 +114,7 @@
|
|||||||
"alass_path": "", // Alass path setting.
|
"alass_path": "", // Alass path setting.
|
||||||
"ffsubsync_path": "", // Ffsubsync path setting.
|
"ffsubsync_path": "", // Ffsubsync path setting.
|
||||||
"ffmpeg_path": "", // Ffmpeg path setting.
|
"ffmpeg_path": "", // Ffmpeg path setting.
|
||||||
"replace": true, // Replace the active subtitle file when sync completes. Values: true | false
|
"replace": true // Replace the active subtitle file when sync completes. Values: true | false
|
||||||
}, // Subsync engine and executable paths.
|
}, // Subsync engine and executable paths.
|
||||||
|
|
||||||
// ==========================================
|
// ==========================================
|
||||||
@@ -121,7 +122,7 @@
|
|||||||
// Initial vertical subtitle position from the bottom.
|
// Initial vertical subtitle position from the bottom.
|
||||||
// ==========================================
|
// ==========================================
|
||||||
"subtitlePosition": {
|
"subtitlePosition": {
|
||||||
"yPercent": 10, // Y percent setting.
|
"yPercent": 10 // Y percent setting.
|
||||||
}, // Initial vertical subtitle position from the bottom.
|
}, // Initial vertical subtitle position from the bottom.
|
||||||
|
|
||||||
// ==========================================
|
// ==========================================
|
||||||
@@ -158,7 +159,7 @@
|
|||||||
"N2": "#f5a97f", // N2 setting.
|
"N2": "#f5a97f", // N2 setting.
|
||||||
"N3": "#f9e2af", // N3 setting.
|
"N3": "#f9e2af", // N3 setting.
|
||||||
"N4": "#a6e3a1", // N4 setting.
|
"N4": "#a6e3a1", // N4 setting.
|
||||||
"N5": "#8aadf4", // N5 setting.
|
"N5": "#8aadf4" // N5 setting.
|
||||||
}, // Jlpt colors setting.
|
}, // Jlpt colors setting.
|
||||||
"frequencyDictionary": {
|
"frequencyDictionary": {
|
||||||
"enabled": false, // Enable frequency-dictionary-based highlighting based on token rank. Values: true | false
|
"enabled": false, // Enable frequency-dictionary-based highlighting based on token rank. Values: true | false
|
||||||
@@ -167,7 +168,13 @@
|
|||||||
"mode": "single", // single: use one color for all matching tokens. banded: use color ramp by frequency band. Values: single | banded
|
"mode": "single", // single: use one color for all matching tokens. banded: use color ramp by frequency band. Values: single | banded
|
||||||
"matchMode": "headword", // headword: frequency lookup uses dictionary form. surface: lookup uses subtitle-visible token text. Values: headword | surface
|
"matchMode": "headword", // headword: frequency lookup uses dictionary form. surface: lookup uses subtitle-visible token text. Values: headword | surface
|
||||||
"singleColor": "#f5a97f", // Color used when frequencyDictionary.mode is `single`.
|
"singleColor": "#f5a97f", // Color used when frequencyDictionary.mode is `single`.
|
||||||
"bandedColors": ["#ed8796", "#f5a97f", "#f9e2af", "#8bd5ca", "#8aadf4"], // Five colors used for rank bands when mode is `banded` (from most common to least within topX).
|
"bandedColors": [
|
||||||
|
"#ed8796",
|
||||||
|
"#f5a97f",
|
||||||
|
"#f9e2af",
|
||||||
|
"#8bd5ca",
|
||||||
|
"#8aadf4"
|
||||||
|
] // Five colors used for rank bands when mode is `banded` (from most common to least within topX).
|
||||||
}, // Frequency dictionary setting.
|
}, // Frequency dictionary setting.
|
||||||
"secondary": {
|
"secondary": {
|
||||||
"fontFamily": "Inter, Noto Sans, Helvetica Neue, sans-serif", // Font family setting.
|
"fontFamily": "Inter, Noto Sans, Helvetica Neue, sans-serif", // Font family setting.
|
||||||
@@ -182,8 +189,8 @@
|
|||||||
"backgroundColor": "rgba(20, 22, 34, 0.78)", // Background color setting.
|
"backgroundColor": "rgba(20, 22, 34, 0.78)", // Background color setting.
|
||||||
"backdropFilter": "blur(6px)", // Backdrop filter setting.
|
"backdropFilter": "blur(6px)", // Backdrop filter setting.
|
||||||
"fontWeight": "600", // Font weight setting.
|
"fontWeight": "600", // Font weight setting.
|
||||||
"fontStyle": "normal", // Font style setting.
|
"fontStyle": "normal" // Font style setting.
|
||||||
}, // Secondary setting.
|
} // Secondary setting.
|
||||||
}, // Primary and secondary subtitle styling.
|
}, // Primary and secondary subtitle styling.
|
||||||
|
|
||||||
// ==========================================
|
// ==========================================
|
||||||
@@ -194,8 +201,10 @@
|
|||||||
"enabled": false, // Enable shared OpenAI-compatible AI provider features. Values: true | false
|
"enabled": false, // Enable shared OpenAI-compatible AI provider features. Values: true | false
|
||||||
"apiKey": "", // Static API key for the shared OpenAI-compatible AI provider.
|
"apiKey": "", // Static API key for the shared OpenAI-compatible AI provider.
|
||||||
"apiKeyCommand": "", // Shell command used to resolve the shared AI provider API key.
|
"apiKeyCommand": "", // Shell command used to resolve the shared AI provider API key.
|
||||||
|
"model": "openai/gpt-4o-mini", // Model setting.
|
||||||
"baseUrl": "https://openrouter.ai/api", // Base URL for the shared OpenAI-compatible AI provider.
|
"baseUrl": "https://openrouter.ai/api", // Base URL for the shared OpenAI-compatible AI provider.
|
||||||
"requestTimeoutMs": 15000, // Timeout in milliseconds for shared AI provider requests.
|
"systemPrompt": "You are a translation engine. Return only the translated text with no explanations.", // System prompt setting.
|
||||||
|
"requestTimeoutMs": 15000 // Timeout in milliseconds for shared AI provider requests.
|
||||||
}, // Canonical OpenAI-compatible provider transport settings shared by Anki and YouTube subtitle fixing.
|
}, // Canonical OpenAI-compatible provider transport settings shared by Anki and YouTube subtitle fixing.
|
||||||
|
|
||||||
// ==========================================
|
// ==========================================
|
||||||
@@ -213,20 +222,22 @@
|
|||||||
"enabled": true, // Enable local AnkiConnect-compatible proxy for push-based auto-enrichment. Values: true | false
|
"enabled": true, // Enable local AnkiConnect-compatible proxy for push-based auto-enrichment. Values: true | false
|
||||||
"host": "127.0.0.1", // Bind host for local AnkiConnect proxy.
|
"host": "127.0.0.1", // Bind host for local AnkiConnect proxy.
|
||||||
"port": 8766, // Bind port for local AnkiConnect proxy.
|
"port": 8766, // Bind port for local AnkiConnect proxy.
|
||||||
"upstreamUrl": "http://127.0.0.1:8765", // Upstream AnkiConnect URL proxied by local AnkiConnect proxy.
|
"upstreamUrl": "http://127.0.0.1:8765" // Upstream AnkiConnect URL proxied by local AnkiConnect proxy.
|
||||||
}, // Proxy setting.
|
}, // Proxy setting.
|
||||||
"tags": ["SubMiner"], // Tags to add to cards mined or updated by SubMiner. Provide an empty array to disable automatic tagging.
|
"tags": [
|
||||||
|
"SubMiner"
|
||||||
|
], // Tags to add to cards mined or updated by SubMiner. Provide an empty array to disable automatic tagging.
|
||||||
"fields": {
|
"fields": {
|
||||||
"audio": "ExpressionAudio", // Audio setting.
|
"audio": "ExpressionAudio", // Audio setting.
|
||||||
"image": "Picture", // Image setting.
|
"image": "Picture", // Image setting.
|
||||||
"sentence": "Sentence", // Sentence setting.
|
"sentence": "Sentence", // Sentence setting.
|
||||||
"miscInfo": "MiscInfo", // Misc info setting.
|
"miscInfo": "MiscInfo", // Misc info setting.
|
||||||
"translation": "SelectionText", // Translation setting.
|
"translation": "SelectionText" // Translation setting.
|
||||||
}, // Fields setting.
|
}, // Fields setting.
|
||||||
"ai": {
|
"ai": {
|
||||||
"enabled": false, // Enable AI provider usage for Anki translation/enrichment flows. Values: true | false
|
"enabled": false, // Enable AI provider usage for Anki translation/enrichment flows. Values: true | false
|
||||||
"model": "", // Optional model override for Anki AI translation/enrichment flows.
|
"model": "", // Optional model override for Anki AI translation/enrichment flows.
|
||||||
"systemPrompt": "", // Optional system prompt override for Anki AI translation/enrichment flows.
|
"systemPrompt": "" // Optional system prompt override for Anki AI translation/enrichment flows.
|
||||||
}, // Ai setting.
|
}, // Ai setting.
|
||||||
"media": {
|
"media": {
|
||||||
"generateAudio": true, // Generate audio setting. Values: true | false
|
"generateAudio": true, // Generate audio setting. Values: true | false
|
||||||
@@ -239,7 +250,7 @@
|
|||||||
"animatedCrf": 35, // Animated crf setting.
|
"animatedCrf": 35, // Animated crf setting.
|
||||||
"audioPadding": 0.5, // Audio padding setting.
|
"audioPadding": 0.5, // Audio padding setting.
|
||||||
"fallbackDuration": 3, // Fallback duration setting.
|
"fallbackDuration": 3, // Fallback duration setting.
|
||||||
"maxMediaDuration": 30, // Max media duration setting.
|
"maxMediaDuration": 30 // Max media duration setting.
|
||||||
}, // Media setting.
|
}, // Media setting.
|
||||||
"behavior": {
|
"behavior": {
|
||||||
"overwriteAudio": true, // Overwrite audio setting. Values: true | false
|
"overwriteAudio": true, // Overwrite audio setting. Values: true | false
|
||||||
@@ -247,7 +258,7 @@
|
|||||||
"mediaInsertMode": "append", // Media insert mode setting.
|
"mediaInsertMode": "append", // Media insert mode setting.
|
||||||
"highlightWord": true, // Highlight word setting. Values: true | false
|
"highlightWord": true, // Highlight word setting. Values: true | false
|
||||||
"notificationType": "osd", // Notification type setting.
|
"notificationType": "osd", // Notification type setting.
|
||||||
"autoUpdateNewCards": true, // Automatically update newly added cards. Values: true | false
|
"autoUpdateNewCards": true // Automatically update newly added cards. Values: true | false
|
||||||
}, // Behavior setting.
|
}, // Behavior setting.
|
||||||
"nPlusOne": {
|
"nPlusOne": {
|
||||||
"highlightEnabled": false, // Enable fast local highlighting for words already known in Anki. Values: true | false
|
"highlightEnabled": false, // Enable fast local highlighting for words already known in Anki. Values: true | false
|
||||||
@@ -256,20 +267,20 @@
|
|||||||
"decks": [], // Decks used for N+1 known-word cache scope. Supports one or more deck names.
|
"decks": [], // Decks used for N+1 known-word cache scope. Supports one or more deck names.
|
||||||
"minSentenceWords": 3, // Minimum sentence word count required for N+1 targeting (default: 3).
|
"minSentenceWords": 3, // Minimum sentence word count required for N+1 targeting (default: 3).
|
||||||
"nPlusOne": "#c6a0f6", // Color used for the single N+1 target token highlight.
|
"nPlusOne": "#c6a0f6", // Color used for the single N+1 target token highlight.
|
||||||
"knownWord": "#a6da95", // Color used for legacy known-word highlights.
|
"knownWord": "#a6da95" // Color used for legacy known-word highlights.
|
||||||
}, // N plus one setting.
|
}, // N plus one setting.
|
||||||
"metadata": {
|
"metadata": {
|
||||||
"pattern": "[SubMiner] %f (%t)", // Pattern setting.
|
"pattern": "[SubMiner] %f (%t)" // Pattern setting.
|
||||||
}, // Metadata setting.
|
}, // Metadata setting.
|
||||||
"isLapis": {
|
"isLapis": {
|
||||||
"enabled": false, // Enabled setting. Values: true | false
|
"enabled": false, // Enabled setting. Values: true | false
|
||||||
"sentenceCardModel": "Japanese sentences", // Sentence card model setting.
|
"sentenceCardModel": "Japanese sentences" // Sentence card model setting.
|
||||||
}, // Is lapis setting.
|
}, // Is lapis setting.
|
||||||
"isKiku": {
|
"isKiku": {
|
||||||
"enabled": false, // Enabled setting. Values: true | false
|
"enabled": false, // Enabled setting. Values: true | false
|
||||||
"fieldGrouping": "disabled", // Kiku duplicate-card field grouping mode. Values: auto | manual | disabled
|
"fieldGrouping": "disabled", // Kiku duplicate-card field grouping mode. Values: auto | manual | disabled
|
||||||
"deleteDuplicateInAuto": true, // Delete duplicate in auto setting. Values: true | false
|
"deleteDuplicateInAuto": true // Delete duplicate in auto setting. Values: true | false
|
||||||
}, // Is kiku setting.
|
} // Is kiku setting.
|
||||||
}, // Automatic Anki updates and media generation options.
|
}, // Automatic Anki updates and media generation options.
|
||||||
|
|
||||||
// ==========================================
|
// ==========================================
|
||||||
@@ -279,7 +290,7 @@
|
|||||||
"jimaku": {
|
"jimaku": {
|
||||||
"apiBaseUrl": "https://jimaku.cc", // Api base url setting.
|
"apiBaseUrl": "https://jimaku.cc", // Api base url setting.
|
||||||
"languagePreference": "ja", // Preferred language used in Jimaku search. Values: ja | en | none
|
"languagePreference": "ja", // Preferred language used in Jimaku search. Values: ja | en | none
|
||||||
"maxEntryResults": 10, // Maximum Jimaku search results returned.
|
"maxEntryResults": 10 // Maximum Jimaku search results returned.
|
||||||
}, // Jimaku API configuration and defaults.
|
}, // Jimaku API configuration and defaults.
|
||||||
|
|
||||||
// ==========================================
|
// ==========================================
|
||||||
@@ -294,9 +305,12 @@
|
|||||||
"fixWithAi": false, // Use shared AI provider to post-process whisper-generated YouTube subtitles. Values: true | false
|
"fixWithAi": false, // Use shared AI provider to post-process whisper-generated YouTube subtitles. Values: true | false
|
||||||
"ai": {
|
"ai": {
|
||||||
"model": "", // Optional model override for YouTube subtitle AI post-processing.
|
"model": "", // Optional model override for YouTube subtitle AI post-processing.
|
||||||
"systemPrompt": "", // Optional system prompt override for YouTube subtitle AI post-processing.
|
"systemPrompt": "" // Optional system prompt override for YouTube subtitle AI post-processing.
|
||||||
}, // Ai setting.
|
}, // Ai setting.
|
||||||
"primarySubLanguages": ["ja", "jpn"], // Comma-separated primary subtitle language priority used by the launcher.
|
"primarySubLanguages": [
|
||||||
|
"ja",
|
||||||
|
"jpn"
|
||||||
|
] // Comma-separated primary subtitle language priority used by the launcher.
|
||||||
}, // Defaults for SubMiner YouTube subtitle generation.
|
}, // Defaults for SubMiner YouTube subtitle generation.
|
||||||
|
|
||||||
// ==========================================
|
// ==========================================
|
||||||
@@ -317,9 +331,9 @@
|
|||||||
"collapsibleSections": {
|
"collapsibleSections": {
|
||||||
"description": false, // Open the Description section by default in character dictionary glossary entries. Values: true | false
|
"description": false, // Open the Description section by default in character dictionary glossary entries. Values: true | false
|
||||||
"characterInformation": false, // Open the Character Information section by default in character dictionary glossary entries. Values: true | false
|
"characterInformation": false, // Open the Character Information section by default in character dictionary glossary entries. Values: true | false
|
||||||
"voicedBy": false, // Open the Voiced by section by default in character dictionary glossary entries. Values: true | false
|
"voicedBy": false // Open the Voiced by section by default in character dictionary glossary entries. Values: true | false
|
||||||
}, // Collapsible sections setting.
|
} // Collapsible sections setting.
|
||||||
}, // Character dictionary setting.
|
} // Character dictionary setting.
|
||||||
}, // Anilist API credentials and update behavior.
|
}, // Anilist API credentials and update behavior.
|
||||||
|
|
||||||
// ==========================================
|
// ==========================================
|
||||||
@@ -343,8 +357,16 @@
|
|||||||
"pullPictures": false, // Enable Jellyfin poster/icon fetching for launcher menus. Values: true | false
|
"pullPictures": false, // Enable Jellyfin poster/icon fetching for launcher menus. Values: true | false
|
||||||
"iconCacheDir": "/tmp/subminer-jellyfin-icons", // Directory used by launcher for cached Jellyfin poster icons.
|
"iconCacheDir": "/tmp/subminer-jellyfin-icons", // Directory used by launcher for cached Jellyfin poster icons.
|
||||||
"directPlayPreferred": true, // Try direct play before server-managed transcoding when possible. Values: true | false
|
"directPlayPreferred": true, // Try direct play before server-managed transcoding when possible. Values: true | false
|
||||||
"directPlayContainers": ["mkv", "mp4", "webm", "mov", "flac", "mp3", "aac"], // Container allowlist for direct play decisions.
|
"directPlayContainers": [
|
||||||
"transcodeVideoCodec": "h264", // Preferred transcode video codec when direct play is unavailable.
|
"mkv",
|
||||||
|
"mp4",
|
||||||
|
"webm",
|
||||||
|
"mov",
|
||||||
|
"flac",
|
||||||
|
"mp3",
|
||||||
|
"aac"
|
||||||
|
], // Container allowlist for direct play decisions.
|
||||||
|
"transcodeVideoCodec": "h264" // Preferred transcode video codec when direct play is unavailable.
|
||||||
}, // Optional Jellyfin integration for auth, browsing, and playback launch.
|
}, // Optional Jellyfin integration for auth, browsing, and playback launch.
|
||||||
|
|
||||||
// ==========================================
|
// ==========================================
|
||||||
@@ -355,7 +377,7 @@
|
|||||||
"discordPresence": {
|
"discordPresence": {
|
||||||
"enabled": false, // Enable optional Discord Rich Presence updates. Values: true | false
|
"enabled": false, // Enable optional Discord Rich Presence updates. Values: true | false
|
||||||
"updateIntervalMs": 3000, // Minimum interval between presence payload updates.
|
"updateIntervalMs": 3000, // Minimum interval between presence payload updates.
|
||||||
"debounceMs": 750, // Debounce delay used to collapse bursty presence updates.
|
"debounceMs": 750 // Debounce delay used to collapse bursty presence updates.
|
||||||
}, // Optional Discord Rich Presence activity card updates for current playback/study session.
|
}, // Optional Discord Rich Presence activity card updates for current playback/study session.
|
||||||
|
|
||||||
// ==========================================
|
// ==========================================
|
||||||
@@ -377,7 +399,7 @@
|
|||||||
"telemetryDays": 30, // Telemetry retention window in days.
|
"telemetryDays": 30, // Telemetry retention window in days.
|
||||||
"dailyRollupsDays": 365, // Daily rollup retention window in days.
|
"dailyRollupsDays": 365, // Daily rollup retention window in days.
|
||||||
"monthlyRollupsDays": 1825, // Monthly rollup retention window in days.
|
"monthlyRollupsDays": 1825, // Monthly rollup retention window in days.
|
||||||
"vacuumIntervalDays": 7, // Minimum days between VACUUM runs.
|
"vacuumIntervalDays": 7 // Minimum days between VACUUM runs.
|
||||||
}, // Retention setting.
|
} // Retention setting.
|
||||||
}, // Enable/disable immersion tracking.
|
} // Enable/disable immersion tracking.
|
||||||
}
|
}
|
||||||
|
|||||||
21
docs/RELEASING.md
Normal file
21
docs/RELEASING.md
Normal file
@@ -0,0 +1,21 @@
|
|||||||
|
<!-- read_when: cutting a tagged release or debugging release prep -->
|
||||||
|
|
||||||
|
# Releasing
|
||||||
|
|
||||||
|
1. Confirm `main` is green: `gh run list --workflow CI --limit 5`.
|
||||||
|
2. Bump `package.json` to the release version.
|
||||||
|
3. Build release metadata before tagging:
|
||||||
|
`bun run changelog:build --version <version>`
|
||||||
|
4. Review `CHANGELOG.md`.
|
||||||
|
5. Run release gate locally:
|
||||||
|
`bun run changelog:check --version <version>`
|
||||||
|
`bun run test:fast`
|
||||||
|
`bun run typecheck`
|
||||||
|
6. Commit release prep.
|
||||||
|
7. Tag the commit: `git tag v<version>`.
|
||||||
|
8. Push commit + tag.
|
||||||
|
|
||||||
|
Notes:
|
||||||
|
|
||||||
|
- `changelog:check` now rejects tag/package version mismatches.
|
||||||
|
- Do not tag while `changes/*.md` fragments still exist.
|
||||||
17
package.json
17
package.json
@@ -1,6 +1,6 @@
|
|||||||
{
|
{
|
||||||
"name": "subminer",
|
"name": "subminer",
|
||||||
"version": "0.5.0",
|
"version": "0.5.5",
|
||||||
"description": "All-in-one sentence mining overlay with AnkiConnect and dictionary integration",
|
"description": "All-in-one sentence mining overlay with AnkiConnect and dictionary integration",
|
||||||
"packageManager": "bun@1.3.5",
|
"packageManager": "bun@1.3.5",
|
||||||
"main": "dist/main-entry.js",
|
"main": "dist/main-entry.js",
|
||||||
@@ -30,8 +30,8 @@
|
|||||||
"test:plugin:src": "lua scripts/test-plugin-start-gate.lua && lua scripts/test-plugin-binary-windows.lua",
|
"test:plugin:src": "lua scripts/test-plugin-start-gate.lua && lua scripts/test-plugin-binary-windows.lua",
|
||||||
"test:launcher:smoke:src": "bun test launcher/smoke.e2e.test.ts",
|
"test:launcher:smoke:src": "bun test launcher/smoke.e2e.test.ts",
|
||||||
"test:launcher:src": "bun test launcher/config.test.ts launcher/config-domain-parsers.test.ts launcher/mpv.test.ts launcher/parse-args.test.ts launcher/main.test.ts launcher/commands/command-modules.test.ts launcher/smoke.e2e.test.ts && bun run test:plugin:src",
|
"test:launcher:src": "bun test launcher/config.test.ts launcher/config-domain-parsers.test.ts launcher/mpv.test.ts launcher/parse-args.test.ts launcher/main.test.ts launcher/commands/command-modules.test.ts launcher/smoke.e2e.test.ts && bun run test:plugin:src",
|
||||||
"test:core:src": "bun test src/cli/args.test.ts src/cli/help.test.ts src/shared/setup-state.test.ts src/core/services/cli-command.test.ts src/core/services/field-grouping-overlay.test.ts src/core/services/numeric-shortcut-session.test.ts src/core/services/secondary-subtitle.test.ts src/core/services/mpv-render-metrics.test.ts src/core/services/overlay-content-measurement.test.ts src/core/services/mpv-control.test.ts src/core/services/mpv.test.ts src/core/services/runtime-options-ipc.test.ts src/core/services/runtime-config.test.ts src/core/services/yomitan-extension-paths.test.ts src/core/services/config-hot-reload.test.ts src/core/services/discord-presence.test.ts src/core/services/tokenizer.test.ts src/core/services/tokenizer/annotation-stage.test.ts src/core/services/tokenizer/parser-selection-stage.test.ts src/core/services/tokenizer/parser-enrichment-stage.test.ts src/core/services/subsync.test.ts src/core/services/overlay-bridge.test.ts src/core/services/overlay-shortcut-handler.test.ts src/core/services/mining.test.ts src/core/services/anki-jimaku.test.ts src/core/services/jimaku-download-path.test.ts src/core/services/jellyfin.test.ts src/core/services/jellyfin-remote.test.ts src/core/services/immersion-tracker-service.test.ts src/core/services/overlay-runtime-init.test.ts src/core/services/app-ready.test.ts src/core/services/startup-bootstrap.test.ts src/core/services/subtitle-processing-controller.test.ts src/core/services/anilist/anilist-update-queue.test.ts src/core/utils/shortcut-config.test.ts src/main/runtime/first-run-setup-plugin.test.ts src/main/runtime/first-run-setup-service.test.ts src/main/runtime/first-run-setup-window.test.ts src/main/runtime/tray-runtime.test.ts src/main/runtime/tray-main-actions.test.ts src/main/runtime/tray-main-deps.test.ts src/main/runtime/tray-runtime-handlers.test.ts src/main/runtime/cli-command-context-main-deps.test.ts src/main/runtime/app-ready-main-deps.test.ts src/renderer/error-recovery.test.ts 
src/renderer/subtitle-render.test.ts src/renderer/handlers/mouse.test.ts src/renderer/handlers/keyboard.test.ts src/renderer/modals/jimaku.test.ts src/subsync/utils.test.ts src/main/anilist-url-guard.test.ts src/window-trackers/x11-tracker.test.ts src/window-trackers/windows-helper.test.ts src/window-trackers/windows-tracker.test.ts launcher/config.test.ts launcher/config-domain-parsers.test.ts launcher/parse-args.test.ts launcher/main.test.ts launcher/commands/command-modules.test.ts launcher/setup-gate.test.ts",
|
"test:core:src": "bun test src/cli/args.test.ts src/cli/help.test.ts src/shared/setup-state.test.ts src/core/services/cli-command.test.ts src/core/services/field-grouping-overlay.test.ts src/core/services/numeric-shortcut-session.test.ts src/core/services/secondary-subtitle.test.ts src/core/services/mpv-render-metrics.test.ts src/core/services/overlay-content-measurement.test.ts src/core/services/mpv-control.test.ts src/core/services/mpv.test.ts src/core/services/runtime-options-ipc.test.ts src/core/services/runtime-config.test.ts src/core/services/yomitan-extension-paths.test.ts src/core/services/config-hot-reload.test.ts src/core/services/discord-presence.test.ts src/core/services/tokenizer.test.ts src/core/services/tokenizer/annotation-stage.test.ts src/core/services/tokenizer/parser-selection-stage.test.ts src/core/services/tokenizer/parser-enrichment-stage.test.ts src/core/services/subsync.test.ts src/core/services/overlay-bridge.test.ts src/core/services/overlay-shortcut-handler.test.ts src/core/services/mining.test.ts src/core/services/anki-jimaku.test.ts src/core/services/jimaku-download-path.test.ts src/core/services/jellyfin.test.ts src/core/services/jellyfin-remote.test.ts src/core/services/immersion-tracker-service.test.ts src/core/services/overlay-runtime-init.test.ts src/core/services/app-ready.test.ts src/core/services/startup-bootstrap.test.ts src/core/services/subtitle-processing-controller.test.ts src/core/services/anilist/anilist-update-queue.test.ts src/core/utils/shortcut-config.test.ts src/main/runtime/first-run-setup-plugin.test.ts src/main/runtime/first-run-setup-service.test.ts src/main/runtime/first-run-setup-window.test.ts src/main/runtime/tray-runtime.test.ts src/main/runtime/tray-main-actions.test.ts src/main/runtime/tray-main-deps.test.ts src/main/runtime/tray-runtime-handlers.test.ts src/main/runtime/cli-command-context-main-deps.test.ts src/main/runtime/app-ready-main-deps.test.ts src/renderer/error-recovery.test.ts 
src/renderer/subtitle-render.test.ts src/renderer/handlers/mouse.test.ts src/renderer/handlers/keyboard.test.ts src/renderer/modals/jimaku.test.ts src/subsync/utils.test.ts src/main/anilist-url-guard.test.ts src/window-trackers/hyprland-tracker.test.ts src/window-trackers/x11-tracker.test.ts src/window-trackers/windows-helper.test.ts src/window-trackers/windows-tracker.test.ts launcher/config.test.ts launcher/config-domain-parsers.test.ts launcher/parse-args.test.ts launcher/main.test.ts launcher/commands/command-modules.test.ts launcher/setup-gate.test.ts",
|
||||||
"test:core:dist": "bun test dist/cli/args.test.js dist/cli/help.test.js dist/core/services/cli-command.test.js dist/core/services/ipc.test.js dist/core/services/anki-jimaku-ipc.test.js dist/core/services/field-grouping-overlay.test.js dist/core/services/numeric-shortcut-session.test.js dist/core/services/secondary-subtitle.test.js dist/core/services/mpv-render-metrics.test.js dist/core/services/overlay-content-measurement.test.js dist/core/services/mpv-control.test.js dist/core/services/mpv.test.js dist/core/services/runtime-options-ipc.test.js dist/core/services/runtime-config.test.js dist/core/services/yomitan-extension-paths.test.js dist/core/services/config-hot-reload.test.js dist/core/services/discord-presence.test.js dist/core/services/tokenizer.test.js dist/core/services/tokenizer/annotation-stage.test.js dist/core/services/tokenizer/parser-selection-stage.test.js dist/core/services/tokenizer/parser-enrichment-stage.test.js dist/core/services/subsync.test.js dist/core/services/overlay-bridge.test.js dist/core/services/overlay-manager.test.js dist/core/services/overlay-shortcut-handler.test.js dist/core/services/mining.test.js dist/core/services/anki-jimaku.test.js dist/core/services/jimaku-download-path.test.js dist/core/services/jellyfin.test.js dist/core/services/jellyfin-remote.test.js dist/core/services/immersion-tracker-service.test.js dist/core/services/overlay-runtime-init.test.js dist/core/services/app-ready.test.js dist/core/services/startup-bootstrap.test.js dist/core/services/subtitle-processing-controller.test.js dist/core/services/anilist/anilist-token-store.test.js dist/core/services/anilist/anilist-update-queue.test.js dist/renderer/error-recovery.test.js dist/renderer/subtitle-render.test.js dist/renderer/handlers/mouse.test.js dist/renderer/handlers/keyboard.test.js dist/renderer/modals/jimaku.test.js dist/subsync/utils.test.js dist/main/anilist-url-guard.test.js dist/window-trackers/x11-tracker.test.js 
dist/window-trackers/windows-helper.test.js dist/window-trackers/windows-tracker.test.js",
|
"test:core:dist": "bun test dist/cli/args.test.js dist/cli/help.test.js dist/core/services/cli-command.test.js dist/core/services/ipc.test.js dist/core/services/anki-jimaku-ipc.test.js dist/core/services/field-grouping-overlay.test.js dist/core/services/numeric-shortcut-session.test.js dist/core/services/secondary-subtitle.test.js dist/core/services/mpv-render-metrics.test.js dist/core/services/overlay-content-measurement.test.js dist/core/services/mpv-control.test.js dist/core/services/mpv.test.js dist/core/services/runtime-options-ipc.test.js dist/core/services/runtime-config.test.js dist/core/services/yomitan-extension-paths.test.js dist/core/services/config-hot-reload.test.js dist/core/services/discord-presence.test.js dist/core/services/tokenizer.test.js dist/core/services/tokenizer/annotation-stage.test.js dist/core/services/tokenizer/parser-selection-stage.test.js dist/core/services/tokenizer/parser-enrichment-stage.test.js dist/core/services/subsync.test.js dist/core/services/overlay-bridge.test.js dist/core/services/overlay-manager.test.js dist/core/services/overlay-shortcut-handler.test.js dist/core/services/mining.test.js dist/core/services/anki-jimaku.test.js dist/core/services/jimaku-download-path.test.js dist/core/services/jellyfin.test.js dist/core/services/jellyfin-remote.test.js dist/core/services/immersion-tracker-service.test.js dist/core/services/overlay-runtime-init.test.js dist/core/services/app-ready.test.js dist/core/services/startup-bootstrap.test.js dist/core/services/subtitle-processing-controller.test.js dist/core/services/anilist/anilist-token-store.test.js dist/core/services/anilist/anilist-update-queue.test.js dist/renderer/error-recovery.test.js dist/renderer/subtitle-render.test.js dist/renderer/handlers/mouse.test.js dist/renderer/handlers/keyboard.test.js dist/renderer/modals/jimaku.test.js dist/subsync/utils.test.js dist/main/anilist-url-guard.test.js dist/window-trackers/hyprland-tracker.test.js 
dist/window-trackers/x11-tracker.test.js dist/window-trackers/windows-helper.test.js dist/window-trackers/windows-tracker.test.js",
|
||||||
"test:core:smoke:dist": "bun test dist/cli/help.test.js dist/core/services/runtime-config.test.js dist/core/services/ipc.test.js dist/core/services/overlay-manager.test.js dist/core/services/anilist/anilist-token-store.test.js dist/core/services/startup-bootstrap.test.js dist/renderer/error-recovery.test.js dist/main/anilist-url-guard.test.js dist/window-trackers/x11-tracker.test.js",
|
"test:core:smoke:dist": "bun test dist/cli/help.test.js dist/core/services/runtime-config.test.js dist/core/services/ipc.test.js dist/core/services/overlay-manager.test.js dist/core/services/anilist/anilist-token-store.test.js dist/core/services/startup-bootstrap.test.js dist/renderer/error-recovery.test.js dist/main/anilist-url-guard.test.js dist/window-trackers/x11-tracker.test.js",
|
||||||
"test:smoke:dist": "bun run test:config:smoke:dist && bun run test:core:smoke:dist",
|
"test:smoke:dist": "bun run test:config:smoke:dist && bun run test:core:smoke:dist",
|
||||||
"test:subtitle:src": "bun test src/core/services/subsync.test.ts src/subsync/utils.test.ts",
|
"test:subtitle:src": "bun test src/core/services/subsync.test.ts src/subsync/utils.test.ts",
|
||||||
@@ -56,11 +56,12 @@
|
|||||||
"dev": "bun run build && electron . --start --dev",
|
"dev": "bun run build && electron . --start --dev",
|
||||||
"stop": "electron . --stop",
|
"stop": "electron . --stop",
|
||||||
"toggle": "electron . --toggle",
|
"toggle": "electron . --toggle",
|
||||||
"build:appimage": "bun run build && electron-builder --linux AppImage",
|
"build:appimage": "bun run build && electron-builder --linux AppImage --publish never",
|
||||||
"build:mac": "bun run build && electron-builder --mac dmg zip",
|
"build:mac": "bun run build && electron-builder --mac dmg zip --publish never",
|
||||||
"build:mac:unsigned": "bun run build && env -u APPLE_ID -u APPLE_APP_SPECIFIC_PASSWORD -u APPLE_TEAM_ID -u CSC_LINK -u CSC_KEY_PASSWORD CSC_IDENTITY_AUTO_DISCOVERY=false electron-builder --mac dmg zip",
|
"build:mac:unsigned": "bun run build && env -u APPLE_ID -u APPLE_APP_SPECIFIC_PASSWORD -u APPLE_TEAM_ID -u CSC_LINK -u CSC_KEY_PASSWORD CSC_IDENTITY_AUTO_DISCOVERY=false electron-builder --mac dmg zip --publish never",
|
||||||
"build:mac:zip": "bun run build && electron-builder --mac zip",
|
"build:mac:zip": "bun run build && electron-builder --mac zip --publish never",
|
||||||
"build:win": "bun run build && electron-builder --win nsis zip"
|
"build:win": "bun run build && electron-builder --win nsis zip --publish never",
|
||||||
|
"build:win:unsigned": "bun run build && node scripts/build-win-unsigned.mjs"
|
||||||
},
|
},
|
||||||
"keywords": [
|
"keywords": [
|
||||||
"anki",
|
"anki",
|
||||||
|
|||||||
@@ -66,7 +66,7 @@ aniskip_show_button=yes
|
|||||||
aniskip_button_text=You can skip by pressing %s
|
aniskip_button_text=You can skip by pressing %s
|
||||||
|
|
||||||
# Keybinding to execute intro skip when button is visible.
|
# Keybinding to execute intro skip when button is visible.
|
||||||
aniskip_button_key=y-k
|
aniskip_button_key=TAB
|
||||||
|
|
||||||
# OSD hint duration in seconds (shown during first 3s of intro).
|
# OSD hint duration in seconds (shown during first 3s of intro).
|
||||||
aniskip_button_duration=3
|
aniskip_button_duration=3
|
||||||
|
|||||||
@@ -1,5 +1,6 @@
|
|||||||
local M = {}
|
local M = {}
|
||||||
local matcher = require("aniskip_match")
|
local matcher = require("aniskip_match")
|
||||||
|
local DEFAULT_ANISKIP_BUTTON_KEY = "TAB"
|
||||||
|
|
||||||
function M.create(ctx)
|
function M.create(ctx)
|
||||||
local mp = ctx.mp
|
local mp = ctx.mp
|
||||||
@@ -464,7 +465,7 @@ function M.create(ctx)
|
|||||||
local intro_start = state.aniskip.intro_start or -1
|
local intro_start = state.aniskip.intro_start or -1
|
||||||
local hint_window_end = intro_start + 3
|
local hint_window_end = intro_start + 3
|
||||||
if in_intro and not state.aniskip.prompt_shown and now >= intro_start and now < hint_window_end then
|
if in_intro and not state.aniskip.prompt_shown and now >= intro_start and now < hint_window_end then
|
||||||
local key = opts.aniskip_button_key ~= "" and opts.aniskip_button_key or "y-k"
|
local key = opts.aniskip_button_key ~= "" and opts.aniskip_button_key or DEFAULT_ANISKIP_BUTTON_KEY
|
||||||
local message = string.format(opts.aniskip_button_text, key)
|
local message = string.format(opts.aniskip_button_text, key)
|
||||||
mp.osd_message(message, tonumber(opts.aniskip_button_duration) or 3)
|
mp.osd_message(message, tonumber(opts.aniskip_button_duration) or 3)
|
||||||
state.aniskip.prompt_shown = true
|
state.aniskip.prompt_shown = true
|
||||||
|
|||||||
@@ -107,12 +107,8 @@ function M.create(ctx)
|
|||||||
end
|
end
|
||||||
|
|
||||||
local function find_binary_override()
|
local function find_binary_override()
|
||||||
local candidates = {
|
for _, env_name in ipairs({ "SUBMINER_APPIMAGE_PATH", "SUBMINER_BINARY_PATH" }) do
|
||||||
resolve_binary_candidate(os.getenv("SUBMINER_APPIMAGE_PATH")),
|
local path = resolve_binary_candidate(os.getenv(env_name))
|
||||||
resolve_binary_candidate(os.getenv("SUBMINER_BINARY_PATH")),
|
|
||||||
}
|
|
||||||
|
|
||||||
for _, path in ipairs(candidates) do
|
|
||||||
if path and path ~= "" then
|
if path and path ~= "" then
|
||||||
return path
|
return path
|
||||||
end
|
end
|
||||||
|
|||||||
@@ -1,4 +1,5 @@
|
|||||||
local M = {}
|
local M = {}
|
||||||
|
local DEFAULT_ANISKIP_BUTTON_KEY = "TAB"
|
||||||
|
|
||||||
local function normalize_socket_path_option(socket_path, default_socket_path)
|
local function normalize_socket_path_option(socket_path, default_socket_path)
|
||||||
if type(default_socket_path) ~= "string" then
|
if type(default_socket_path) ~= "string" then
|
||||||
@@ -42,7 +43,7 @@ function M.load(options_lib, default_socket_path)
|
|||||||
aniskip_payload = "",
|
aniskip_payload = "",
|
||||||
aniskip_show_button = true,
|
aniskip_show_button = true,
|
||||||
aniskip_button_text = "You can skip by pressing %s",
|
aniskip_button_text = "You can skip by pressing %s",
|
||||||
aniskip_button_key = "y-k",
|
aniskip_button_key = DEFAULT_ANISKIP_BUTTON_KEY,
|
||||||
aniskip_button_duration = 3,
|
aniskip_button_duration = 3,
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|||||||
@@ -1,4 +1,6 @@
|
|||||||
local M = {}
|
local M = {}
|
||||||
|
local DEFAULT_ANISKIP_BUTTON_KEY = "TAB"
|
||||||
|
local LEGACY_ANISKIP_BUTTON_KEY = "y-k"
|
||||||
|
|
||||||
function M.create(ctx)
|
function M.create(ctx)
|
||||||
local mp = ctx.mp
|
local mp = ctx.mp
|
||||||
@@ -89,8 +91,11 @@ function M.create(ctx)
|
|||||||
aniskip.skip_intro_now()
|
aniskip.skip_intro_now()
|
||||||
end)
|
end)
|
||||||
end
|
end
|
||||||
if opts.aniskip_button_key ~= "y-k" then
|
if
|
||||||
mp.add_key_binding("y-k", "subminer-skip-intro-fallback", function()
|
opts.aniskip_button_key ~= LEGACY_ANISKIP_BUTTON_KEY
|
||||||
|
and opts.aniskip_button_key ~= DEFAULT_ANISKIP_BUTTON_KEY
|
||||||
|
then
|
||||||
|
mp.add_key_binding(LEGACY_ANISKIP_BUTTON_KEY, "subminer-skip-intro-fallback", function()
|
||||||
aniskip.skip_intro_now()
|
aniskip.skip_intro_now()
|
||||||
end)
|
end)
|
||||||
end
|
end
|
||||||
|
|||||||
@@ -34,12 +34,22 @@ test('writeChangelogArtifacts ignores README, groups fragments by type, writes r
|
|||||||
const { writeChangelogArtifacts } = await loadModule();
|
const { writeChangelogArtifacts } = await loadModule();
|
||||||
const workspace = createWorkspace('write-artifacts');
|
const workspace = createWorkspace('write-artifacts');
|
||||||
const projectRoot = path.join(workspace, 'SubMiner');
|
const projectRoot = path.join(workspace, 'SubMiner');
|
||||||
const existingChangelog = ['# Changelog', '', '## v0.4.0 (2026-03-01)', '- Existing fix', ''].join('\n');
|
const existingChangelog = [
|
||||||
|
'# Changelog',
|
||||||
|
'',
|
||||||
|
'## v0.4.0 (2026-03-01)',
|
||||||
|
'- Existing fix',
|
||||||
|
'',
|
||||||
|
].join('\n');
|
||||||
|
|
||||||
fs.mkdirSync(projectRoot, { recursive: true });
|
fs.mkdirSync(projectRoot, { recursive: true });
|
||||||
fs.mkdirSync(path.join(projectRoot, 'changes'), { recursive: true });
|
fs.mkdirSync(path.join(projectRoot, 'changes'), { recursive: true });
|
||||||
fs.writeFileSync(path.join(projectRoot, 'CHANGELOG.md'), existingChangelog, 'utf8');
|
fs.writeFileSync(path.join(projectRoot, 'CHANGELOG.md'), existingChangelog, 'utf8');
|
||||||
fs.writeFileSync(path.join(projectRoot, 'changes', 'README.md'), '# Changelog Fragments\n\nIgnored helper text.\n', 'utf8');
|
fs.writeFileSync(
|
||||||
|
path.join(projectRoot, 'changes', 'README.md'),
|
||||||
|
'# Changelog Fragments\n\nIgnored helper text.\n',
|
||||||
|
'utf8',
|
||||||
|
);
|
||||||
fs.writeFileSync(
|
fs.writeFileSync(
|
||||||
path.join(projectRoot, 'changes', '001.md'),
|
path.join(projectRoot, 'changes', '001.md'),
|
||||||
['type: added', 'area: overlay', '', '- Added release fragments.'].join('\n'),
|
['type: added', 'area: overlay', '', '- Added release fragments.'].join('\n'),
|
||||||
@@ -59,13 +69,10 @@ test('writeChangelogArtifacts ignores README, groups fragments by type, writes r
|
|||||||
});
|
});
|
||||||
|
|
||||||
assert.deepEqual(result.outputPaths, [path.join(projectRoot, 'CHANGELOG.md')]);
|
assert.deepEqual(result.outputPaths, [path.join(projectRoot, 'CHANGELOG.md')]);
|
||||||
assert.deepEqual(
|
assert.deepEqual(result.deletedFragmentPaths, [
|
||||||
result.deletedFragmentPaths,
|
|
||||||
[
|
|
||||||
path.join(projectRoot, 'changes', '001.md'),
|
path.join(projectRoot, 'changes', '001.md'),
|
||||||
path.join(projectRoot, 'changes', '002.md'),
|
path.join(projectRoot, 'changes', '002.md'),
|
||||||
],
|
]);
|
||||||
);
|
|
||||||
assert.equal(fs.existsSync(path.join(projectRoot, 'changes', '001.md')), false);
|
assert.equal(fs.existsSync(path.join(projectRoot, 'changes', '001.md')), false);
|
||||||
assert.equal(fs.existsSync(path.join(projectRoot, 'changes', '002.md')), false);
|
assert.equal(fs.existsSync(path.join(projectRoot, 'changes', '002.md')), false);
|
||||||
assert.equal(fs.existsSync(path.join(projectRoot, 'changes', 'README.md')), true);
|
assert.equal(fs.existsSync(path.join(projectRoot, 'changes', 'README.md')), true);
|
||||||
@@ -76,7 +83,10 @@ test('writeChangelogArtifacts ignores README, groups fragments by type, writes r
|
|||||||
/^# Changelog\n\n## v0\.4\.1 \(2026-03-07\)\n\n### Added\n- Overlay: Added release fragments\.\n\n### Fixed\n- Release: Fixed release notes generation\.\n\n## v0\.4\.0 \(2026-03-01\)\n- Existing fix\n$/m,
|
/^# Changelog\n\n## v0\.4\.1 \(2026-03-07\)\n\n### Added\n- Overlay: Added release fragments\.\n\n### Fixed\n- Release: Fixed release notes generation\.\n\n## v0\.4\.0 \(2026-03-01\)\n- Existing fix\n$/m,
|
||||||
);
|
);
|
||||||
|
|
||||||
const releaseNotes = fs.readFileSync(path.join(projectRoot, 'release', 'release-notes.md'), 'utf8');
|
const releaseNotes = fs.readFileSync(
|
||||||
|
path.join(projectRoot, 'release', 'release-notes.md'),
|
||||||
|
'utf8',
|
||||||
|
);
|
||||||
assert.match(releaseNotes, /## Highlights\n### Added\n- Overlay: Added release fragments\./);
|
assert.match(releaseNotes, /## Highlights\n### Added\n- Overlay: Added release fragments\./);
|
||||||
assert.match(releaseNotes, /### Fixed\n- Release: Fixed release notes generation\./);
|
assert.match(releaseNotes, /### Fixed\n- Release: Fixed release notes generation\./);
|
||||||
assert.match(releaseNotes, /## Installation\n\nSee the README and docs\/installation guide/);
|
assert.match(releaseNotes, /## Installation\n\nSee the README and docs\/installation guide/);
|
||||||
@@ -92,7 +102,11 @@ test('verifyChangelogReadyForRelease ignores README but rejects pending fragment
|
|||||||
|
|
||||||
fs.mkdirSync(path.join(projectRoot, 'changes'), { recursive: true });
|
fs.mkdirSync(path.join(projectRoot, 'changes'), { recursive: true });
|
||||||
fs.writeFileSync(path.join(projectRoot, 'CHANGELOG.md'), '# Changelog\n', 'utf8');
|
fs.writeFileSync(path.join(projectRoot, 'CHANGELOG.md'), '# Changelog\n', 'utf8');
|
||||||
fs.writeFileSync(path.join(projectRoot, 'changes', 'README.md'), '# Changelog Fragments\n', 'utf8');
|
fs.writeFileSync(
|
||||||
|
path.join(projectRoot, 'changes', 'README.md'),
|
||||||
|
'# Changelog Fragments\n',
|
||||||
|
'utf8',
|
||||||
|
);
|
||||||
fs.writeFileSync(path.join(projectRoot, 'changes', '001.md'), '- Pending fragment.\n', 'utf8');
|
fs.writeFileSync(path.join(projectRoot, 'changes', '001.md'), '- Pending fragment.\n', 'utf8');
|
||||||
|
|
||||||
try {
|
try {
|
||||||
@@ -112,6 +126,33 @@ test('verifyChangelogReadyForRelease ignores README but rejects pending fragment
|
|||||||
}
|
}
|
||||||
});
|
});
|
||||||
|
|
||||||
|
test('verifyChangelogReadyForRelease rejects explicit release versions that do not match package.json', async () => {
|
||||||
|
const { verifyChangelogReadyForRelease } = await loadModule();
|
||||||
|
const workspace = createWorkspace('verify-release-version-match');
|
||||||
|
const projectRoot = path.join(workspace, 'SubMiner');
|
||||||
|
|
||||||
|
fs.mkdirSync(path.join(projectRoot, 'changes'), { recursive: true });
|
||||||
|
fs.writeFileSync(
|
||||||
|
path.join(projectRoot, 'package.json'),
|
||||||
|
JSON.stringify({ name: 'subminer', version: '0.4.0' }, null, 2),
|
||||||
|
'utf8',
|
||||||
|
);
|
||||||
|
fs.writeFileSync(
|
||||||
|
path.join(projectRoot, 'CHANGELOG.md'),
|
||||||
|
'# Changelog\n\n## v0.4.1 (2026-03-09)\n- Ready.\n',
|
||||||
|
'utf8',
|
||||||
|
);
|
||||||
|
|
||||||
|
try {
|
||||||
|
assert.throws(
|
||||||
|
() => verifyChangelogReadyForRelease({ cwd: projectRoot, version: '0.4.1' }),
|
||||||
|
/package\.json version \(0\.4\.0\) does not match requested release version \(0\.4\.1\)/,
|
||||||
|
);
|
||||||
|
} finally {
|
||||||
|
fs.rmSync(workspace, { recursive: true, force: true });
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
test('verifyChangelogFragments rejects invalid metadata', async () => {
|
test('verifyChangelogFragments rejects invalid metadata', async () => {
|
||||||
const { verifyChangelogFragments } = await loadModule();
|
const { verifyChangelogFragments } = await loadModule();
|
||||||
const workspace = createWorkspace('lint-invalid');
|
const workspace = createWorkspace('lint-invalid');
|
||||||
|
|||||||
@@ -56,7 +56,10 @@ function resolveDate(date?: string): string {
|
|||||||
return date ?? new Date().toISOString().slice(0, 10);
|
return date ?? new Date().toISOString().slice(0, 10);
|
||||||
}
|
}
|
||||||
|
|
||||||
function resolvePackageVersion(cwd: string, readFileSync: (candidate: string, encoding: BufferEncoding) => string): string {
|
function resolvePackageVersion(
|
||||||
|
cwd: string,
|
||||||
|
readFileSync: (candidate: string, encoding: BufferEncoding) => string,
|
||||||
|
): string {
|
||||||
const packageJsonPath = path.join(cwd, 'package.json');
|
const packageJsonPath = path.join(cwd, 'package.json');
|
||||||
const packageJson = JSON.parse(readFileSync(packageJsonPath, 'utf8')) as { version?: string };
|
const packageJson = JSON.parse(readFileSync(packageJsonPath, 'utf8')) as { version?: string };
|
||||||
if (!packageJson.version) {
|
if (!packageJson.version) {
|
||||||
@@ -65,22 +68,42 @@ function resolvePackageVersion(cwd: string, readFileSync: (candidate: string, en
|
|||||||
return normalizeVersion(packageJson.version);
|
return normalizeVersion(packageJson.version);
|
||||||
}
|
}
|
||||||
|
|
||||||
function resolveVersion(
|
function resolveVersion(options: Pick<ChangelogOptions, 'cwd' | 'version' | 'deps'>): string {
|
||||||
options: Pick<ChangelogOptions, 'cwd' | 'version' | 'deps'>,
|
|
||||||
): string {
|
|
||||||
const cwd = options.cwd ?? process.cwd();
|
const cwd = options.cwd ?? process.cwd();
|
||||||
const readFileSync = options.deps?.readFileSync ?? fs.readFileSync;
|
const readFileSync = options.deps?.readFileSync ?? fs.readFileSync;
|
||||||
return normalizeVersion(options.version ?? resolvePackageVersion(cwd, readFileSync));
|
return normalizeVersion(options.version ?? resolvePackageVersion(cwd, readFileSync));
|
||||||
}
|
}
|
||||||
|
|
||||||
|
function verifyRequestedVersionMatchesPackageVersion(
|
||||||
|
options: Pick<ChangelogOptions, 'cwd' | 'version' | 'deps'>,
|
||||||
|
): void {
|
||||||
|
if (!options.version) {
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
const cwd = options.cwd ?? process.cwd();
|
||||||
|
const existsSync = options.deps?.existsSync ?? fs.existsSync;
|
||||||
|
const readFileSync = options.deps?.readFileSync ?? fs.readFileSync;
|
||||||
|
const packageJsonPath = path.join(cwd, 'package.json');
|
||||||
|
if (!existsSync(packageJsonPath)) {
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
const packageVersion = resolvePackageVersion(cwd, readFileSync);
|
||||||
|
const requestedVersion = normalizeVersion(options.version);
|
||||||
|
|
||||||
|
if (packageVersion !== requestedVersion) {
|
||||||
|
throw new Error(
|
||||||
|
`package.json version (${packageVersion}) does not match requested release version (${requestedVersion}).`,
|
||||||
|
);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
function resolveChangesDir(cwd: string): string {
|
function resolveChangesDir(cwd: string): string {
|
||||||
return path.join(cwd, 'changes');
|
return path.join(cwd, 'changes');
|
||||||
}
|
}
|
||||||
|
|
||||||
function resolveFragmentPaths(
|
function resolveFragmentPaths(cwd: string, deps?: ChangelogFsDeps): string[] {
|
||||||
cwd: string,
|
|
||||||
deps?: ChangelogFsDeps,
|
|
||||||
): string[] {
|
|
||||||
const changesDir = resolveChangesDir(cwd);
|
const changesDir = resolveChangesDir(cwd);
|
||||||
const existsSync = deps?.existsSync ?? fs.existsSync;
|
const existsSync = deps?.existsSync ?? fs.existsSync;
|
||||||
const readdirSync = deps?.readdirSync ?? fs.readdirSync;
|
const readdirSync = deps?.readdirSync ?? fs.readdirSync;
|
||||||
@@ -90,7 +113,10 @@ function resolveFragmentPaths(
|
|||||||
}
|
}
|
||||||
|
|
||||||
return readdirSync(changesDir, { withFileTypes: true })
|
return readdirSync(changesDir, { withFileTypes: true })
|
||||||
.filter((entry) => entry.isFile() && entry.name.endsWith('.md') && entry.name.toLowerCase() !== 'readme.md')
|
.filter(
|
||||||
|
(entry) =>
|
||||||
|
entry.isFile() && entry.name.endsWith('.md') && entry.name.toLowerCase() !== 'readme.md',
|
||||||
|
)
|
||||||
.map((entry) => path.join(changesDir, entry.name))
|
.map((entry) => path.join(changesDir, entry.name))
|
||||||
.sort();
|
.sort();
|
||||||
}
|
}
|
||||||
@@ -112,7 +138,10 @@ function normalizeFragmentBullets(content: string): string[] {
|
|||||||
return lines;
|
return lines;
|
||||||
}
|
}
|
||||||
|
|
||||||
function parseFragmentMetadata(content: string, fragmentPath: string): {
|
function parseFragmentMetadata(
|
||||||
|
content: string,
|
||||||
|
fragmentPath: string,
|
||||||
|
): {
|
||||||
area: string;
|
area: string;
|
||||||
body: string;
|
body: string;
|
||||||
type: FragmentType;
|
type: FragmentType;
|
||||||
@@ -144,9 +173,7 @@ function parseFragmentMetadata(content: string, fragmentPath: string): {
|
|||||||
|
|
||||||
const type = metadata.get('type');
|
const type = metadata.get('type');
|
||||||
if (!type || !CHANGE_TYPES.includes(type as FragmentType)) {
|
if (!type || !CHANGE_TYPES.includes(type as FragmentType)) {
|
||||||
throw new Error(
|
throw new Error(`${fragmentPath} must declare type as one of: ${CHANGE_TYPES.join(', ')}.`);
|
||||||
`${fragmentPath} must declare type as one of: ${CHANGE_TYPES.join(', ')}.`,
|
|
||||||
);
|
|
||||||
}
|
}
|
||||||
|
|
||||||
const area = metadata.get('area');
|
const area = metadata.get('area');
|
||||||
@@ -166,10 +193,7 @@ function parseFragmentMetadata(content: string, fragmentPath: string): {
|
|||||||
};
|
};
|
||||||
}
|
}
|
||||||
|
|
||||||
function readChangeFragments(
|
function readChangeFragments(cwd: string, deps?: ChangelogFsDeps): ChangeFragment[] {
|
||||||
cwd: string,
|
|
||||||
deps?: ChangelogFsDeps,
|
|
||||||
): ChangeFragment[] {
|
|
||||||
const readFileSync = deps?.readFileSync ?? fs.readFileSync;
|
const readFileSync = deps?.readFileSync ?? fs.readFileSync;
|
||||||
return resolveFragmentPaths(cwd, deps).map((fragmentPath) => {
|
return resolveFragmentPaths(cwd, deps).map((fragmentPath) => {
|
||||||
const parsed = parseFragmentMetadata(readFileSync(fragmentPath, 'utf8'), fragmentPath);
|
const parsed = parseFragmentMetadata(readFileSync(fragmentPath, 'utf8'), fragmentPath);
|
||||||
@@ -202,7 +226,9 @@ function renderGroupedChanges(fragments: ChangeFragment[]): string {
|
|||||||
}
|
}
|
||||||
|
|
||||||
const bullets = typeFragments
|
const bullets = typeFragments
|
||||||
.flatMap((fragment) => fragment.bullets.map((bullet) => renderFragmentBullet(fragment, bullet)))
|
.flatMap((fragment) =>
|
||||||
|
fragment.bullets.map((bullet) => renderFragmentBullet(fragment, bullet)),
|
||||||
|
)
|
||||||
.join('\n');
|
.join('\n');
|
||||||
return [`### ${CHANGE_TYPE_HEADINGS[type]}\n${bullets}`];
|
return [`### ${CHANGE_TYPE_HEADINGS[type]}\n${bullets}`];
|
||||||
});
|
});
|
||||||
@@ -215,9 +241,7 @@ function buildReleaseSection(version: string, date: string, fragments: ChangeFra
|
|||||||
throw new Error('No changelog fragments found in changes/.');
|
throw new Error('No changelog fragments found in changes/.');
|
||||||
}
|
}
|
||||||
|
|
||||||
return [`## v${version} (${date})`, '', renderGroupedChanges(fragments), ''].join(
|
return [`## v${version} (${date})`, '', renderGroupedChanges(fragments), ''].join('\n');
|
||||||
'\n',
|
|
||||||
);
|
|
||||||
}
|
}
|
||||||
|
|
||||||
function ensureChangelogHeader(existingChangelog: string): string {
|
function ensureChangelogHeader(existingChangelog: string): string {
|
||||||
@@ -231,7 +255,11 @@ function ensureChangelogHeader(existingChangelog: string): string {
|
|||||||
return `${CHANGELOG_HEADER}\n\n${trimmed}\n`;
|
return `${CHANGELOG_HEADER}\n\n${trimmed}\n`;
|
||||||
}
|
}
|
||||||
|
|
||||||
function prependReleaseSection(existingChangelog: string, releaseSection: string, version: string): string {
|
function prependReleaseSection(
|
||||||
|
existingChangelog: string,
|
||||||
|
releaseSection: string,
|
||||||
|
version: string,
|
||||||
|
): string {
|
||||||
const normalizedExisting = ensureChangelogHeader(existingChangelog);
|
const normalizedExisting = ensureChangelogHeader(existingChangelog);
|
||||||
if (extractReleaseSectionBody(normalizedExisting, version) !== null) {
|
if (extractReleaseSectionBody(normalizedExisting, version) !== null) {
|
||||||
throw new Error(`CHANGELOG already contains a section for v${version}.`);
|
throw new Error(`CHANGELOG already contains a section for v${version}.`);
|
||||||
@@ -263,9 +291,7 @@ function extractReleaseSectionBody(changelog: string, version: string): string |
|
|||||||
return body.trim();
|
return body.trim();
|
||||||
}
|
}
|
||||||
|
|
||||||
export function resolveChangelogOutputPaths(options?: {
|
export function resolveChangelogOutputPaths(options?: { cwd?: string }): string[] {
|
||||||
cwd?: string;
|
|
||||||
}): string[] {
|
|
||||||
const cwd = options?.cwd ?? process.cwd();
|
const cwd = options?.cwd ?? process.cwd();
|
||||||
return [path.join(cwd, 'CHANGELOG.md')];
|
return [path.join(cwd, 'CHANGELOG.md')];
|
||||||
}
|
}
|
||||||
@@ -290,11 +316,7 @@ function renderReleaseNotes(changes: string): string {
|
|||||||
].join('\n');
|
].join('\n');
|
||||||
}
|
}
|
||||||
|
|
||||||
function writeReleaseNotesFile(
|
function writeReleaseNotesFile(cwd: string, changes: string, deps?: ChangelogFsDeps): string {
|
||||||
cwd: string,
|
|
||||||
changes: string,
|
|
||||||
deps?: ChangelogFsDeps,
|
|
||||||
): string {
|
|
||||||
const mkdirSync = deps?.mkdirSync ?? fs.mkdirSync;
|
const mkdirSync = deps?.mkdirSync ?? fs.mkdirSync;
|
||||||
const writeFileSync = deps?.writeFileSync ?? fs.writeFileSync;
|
const writeFileSync = deps?.writeFileSync ?? fs.writeFileSync;
|
||||||
const releaseNotesPath = path.join(cwd, RELEASE_NOTES_PATH);
|
const releaseNotesPath = path.join(cwd, RELEASE_NOTES_PATH);
|
||||||
@@ -359,10 +381,13 @@ export function verifyChangelogFragments(options?: ChangelogOptions): void {
|
|||||||
export function verifyChangelogReadyForRelease(options?: ChangelogOptions): void {
|
export function verifyChangelogReadyForRelease(options?: ChangelogOptions): void {
|
||||||
const cwd = options?.cwd ?? process.cwd();
|
const cwd = options?.cwd ?? process.cwd();
|
||||||
const readFileSync = options?.deps?.readFileSync ?? fs.readFileSync;
|
const readFileSync = options?.deps?.readFileSync ?? fs.readFileSync;
|
||||||
|
verifyRequestedVersionMatchesPackageVersion(options ?? {});
|
||||||
const version = resolveVersion(options ?? {});
|
const version = resolveVersion(options ?? {});
|
||||||
const pendingFragments = resolveFragmentPaths(cwd, options?.deps);
|
const pendingFragments = resolveFragmentPaths(cwd, options?.deps);
|
||||||
if (pendingFragments.length > 0) {
|
if (pendingFragments.length > 0) {
|
||||||
throw new Error(`Pending changelog fragments must be released first: ${pendingFragments.join(', ')}`);
|
throw new Error(
|
||||||
|
`Pending changelog fragments must be released first: ${pendingFragments.join(', ')}`,
|
||||||
|
);
|
||||||
}
|
}
|
||||||
|
|
||||||
const changelogPath = path.join(cwd, 'CHANGELOG.md');
|
const changelogPath = path.join(cwd, 'CHANGELOG.md');
|
||||||
@@ -382,14 +407,14 @@ function isFragmentPath(candidate: string): boolean {
|
|||||||
|
|
||||||
function isIgnoredPullRequestPath(candidate: string): boolean {
|
function isIgnoredPullRequestPath(candidate: string): boolean {
|
||||||
return (
|
return (
|
||||||
candidate === 'CHANGELOG.md'
|
candidate === 'CHANGELOG.md' ||
|
||||||
|| candidate === 'release/release-notes.md'
|
candidate === 'release/release-notes.md' ||
|
||||||
|| candidate === 'AGENTS.md'
|
candidate === 'AGENTS.md' ||
|
||||||
|| candidate === 'README.md'
|
candidate === 'README.md' ||
|
||||||
|| candidate.startsWith('changes/')
|
candidate.startsWith('changes/') ||
|
||||||
|| candidate.startsWith('docs/')
|
candidate.startsWith('docs/') ||
|
||||||
|| candidate.startsWith('.github/')
|
candidate.startsWith('.github/') ||
|
||||||
|| candidate.startsWith('backlog/')
|
candidate.startsWith('backlog/')
|
||||||
);
|
);
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -412,9 +437,7 @@ export function verifyPullRequestChangelog(options: PullRequestChangelogOptions)
|
|||||||
const hasFragment = normalizedEntries.some(
|
const hasFragment = normalizedEntries.some(
|
||||||
(entry) => entry.status !== 'D' && isFragmentPath(entry.path),
|
(entry) => entry.status !== 'D' && isFragmentPath(entry.path),
|
||||||
);
|
);
|
||||||
const requiresFragment = normalizedEntries.some(
|
const requiresFragment = normalizedEntries.some((entry) => !isIgnoredPullRequestPath(entry.path));
|
||||||
(entry) => !isIgnoredPullRequestPath(entry.path),
|
|
||||||
);
|
|
||||||
|
|
||||||
if (requiresFragment && !hasFragment) {
|
if (requiresFragment && !hasFragment) {
|
||||||
throw new Error(
|
throw new Error(
|
||||||
|
|||||||
30
scripts/build-win-unsigned.mjs
Normal file
30
scripts/build-win-unsigned.mjs
Normal file
@@ -0,0 +1,30 @@
|
|||||||
|
import { spawnSync } from 'node:child_process';
|
||||||
|
import { fileURLToPath } from 'node:url';
|
||||||
|
|
||||||
|
const env = { ...process.env };
|
||||||
|
|
||||||
|
for (const name of [
|
||||||
|
'CSC_LINK',
|
||||||
|
'CSC_KEY_PASSWORD',
|
||||||
|
'WIN_CSC_LINK',
|
||||||
|
'WIN_CSC_KEY_PASSWORD',
|
||||||
|
'CSC_NAME',
|
||||||
|
'WIN_CSC_NAME',
|
||||||
|
]) {
|
||||||
|
delete env[name];
|
||||||
|
}
|
||||||
|
|
||||||
|
env.CSC_IDENTITY_AUTO_DISCOVERY = 'false';
|
||||||
|
|
||||||
|
const electronBuilderCli = fileURLToPath(new URL('../node_modules/electron-builder/out/cli/cli.js', import.meta.url));
|
||||||
|
|
||||||
|
const result = spawnSync(process.execPath, [electronBuilderCli, '--win', 'nsis', 'zip', '--publish', 'never'], {
|
||||||
|
stdio: 'inherit',
|
||||||
|
env,
|
||||||
|
});
|
||||||
|
|
||||||
|
if (result.error) {
|
||||||
|
throw result.error;
|
||||||
|
}
|
||||||
|
|
||||||
|
process.exit(result.status ?? 1);
|
||||||
@@ -6,6 +6,8 @@ local function run_plugin_scenario(config)
|
|||||||
sync_calls = {},
|
sync_calls = {},
|
||||||
script_messages = {},
|
script_messages = {},
|
||||||
events = {},
|
events = {},
|
||||||
|
observers = {},
|
||||||
|
key_bindings = {},
|
||||||
osd = {},
|
osd = {},
|
||||||
logs = {},
|
logs = {},
|
||||||
property_sets = {},
|
property_sets = {},
|
||||||
@@ -37,10 +39,29 @@ local function run_plugin_scenario(config)
|
|||||||
return ""
|
return ""
|
||||||
end
|
end
|
||||||
|
|
||||||
function mp.get_property_native(_name)
|
function mp.get_property_native(name)
|
||||||
|
if name == "osd-dimensions" then
|
||||||
|
return config.osd_dimensions or {
|
||||||
|
w = 1280,
|
||||||
|
h = config.osd_height or 720,
|
||||||
|
}
|
||||||
|
end
|
||||||
return config.chapter_list or {}
|
return config.chapter_list or {}
|
||||||
end
|
end
|
||||||
|
|
||||||
|
function mp.get_property_number(name)
|
||||||
|
if name == "time-pos" then
|
||||||
|
return config.time_pos
|
||||||
|
end
|
||||||
|
if name == "sub-pos" then
|
||||||
|
return config.sub_pos or 100
|
||||||
|
end
|
||||||
|
if name == "osd-height" then
|
||||||
|
return config.osd_height or 720
|
||||||
|
end
|
||||||
|
return nil
|
||||||
|
end
|
||||||
|
|
||||||
function mp.get_script_directory()
|
function mp.get_script_directory()
|
||||||
return "plugin/subminer"
|
return "plugin/subminer"
|
||||||
end
|
end
|
||||||
@@ -123,7 +144,13 @@ local function run_plugin_scenario(config)
|
|||||||
recorded.script_messages[name] = fn
|
recorded.script_messages[name] = fn
|
||||||
end
|
end
|
||||||
|
|
||||||
function mp.add_key_binding(_keys, _name, _fn) end
|
function mp.add_key_binding(keys, name, fn)
|
||||||
|
recorded.key_bindings[#recorded.key_bindings + 1] = {
|
||||||
|
keys = keys,
|
||||||
|
name = name,
|
||||||
|
fn = fn,
|
||||||
|
}
|
||||||
|
end
|
||||||
function mp.register_event(name, fn)
|
function mp.register_event(name, fn)
|
||||||
if recorded.events[name] == nil then
|
if recorded.events[name] == nil then
|
||||||
recorded.events[name] = {}
|
recorded.events[name] = {}
|
||||||
@@ -131,7 +158,12 @@ local function run_plugin_scenario(config)
|
|||||||
recorded.events[name][#recorded.events[name] + 1] = fn
|
recorded.events[name][#recorded.events[name] + 1] = fn
|
||||||
end
|
end
|
||||||
function mp.add_hook(_name, _prio, _fn) end
|
function mp.add_hook(_name, _prio, _fn) end
|
||||||
function mp.observe_property(_name, _kind, _fn) end
|
function mp.observe_property(name, _kind, fn)
|
||||||
|
if recorded.observers[name] == nil then
|
||||||
|
recorded.observers[name] = {}
|
||||||
|
end
|
||||||
|
recorded.observers[name][#recorded.observers[name] + 1] = fn
|
||||||
|
end
|
||||||
function mp.osd_message(message, _duration)
|
function mp.osd_message(message, _duration)
|
||||||
recorded.osd[#recorded.osd + 1] = message
|
recorded.osd[#recorded.osd + 1] = message
|
||||||
end
|
end
|
||||||
@@ -177,6 +209,12 @@ local function run_plugin_scenario(config)
|
|||||||
end
|
end
|
||||||
|
|
||||||
function utils.parse_json(json)
|
function utils.parse_json(json)
|
||||||
|
if json == '{"enabled":true,"amount":125}' then
|
||||||
|
return {
|
||||||
|
enabled = true,
|
||||||
|
amount = 125,
|
||||||
|
}, nil
|
||||||
|
end
|
||||||
if json == "__MAL_FOUND__" then
|
if json == "__MAL_FOUND__" then
|
||||||
return {
|
return {
|
||||||
categories = {
|
categories = {
|
||||||
@@ -213,6 +251,26 @@ local function run_plugin_scenario(config)
|
|||||||
package.loaded["mp.msg"] = nil
|
package.loaded["mp.msg"] = nil
|
||||||
package.loaded["mp.options"] = nil
|
package.loaded["mp.options"] = nil
|
||||||
package.loaded["mp.utils"] = nil
|
package.loaded["mp.utils"] = nil
|
||||||
|
package.loaded["binary"] = nil
|
||||||
|
package.loaded["bootstrap"] = nil
|
||||||
|
package.loaded["environment"] = nil
|
||||||
|
package.loaded["hover"] = nil
|
||||||
|
package.loaded["init"] = nil
|
||||||
|
package.loaded["lifecycle"] = nil
|
||||||
|
package.loaded["log"] = nil
|
||||||
|
package.loaded["messages"] = nil
|
||||||
|
package.loaded["options"] = nil
|
||||||
|
package.loaded["process"] = nil
|
||||||
|
package.loaded["state"] = nil
|
||||||
|
package.loaded["ui"] = nil
|
||||||
|
package.loaded["aniskip"] = nil
|
||||||
|
_G.__subminer_plugin_bootstrapped = nil
|
||||||
|
local original_package_config = package.config
|
||||||
|
if config.platform == "windows" then
|
||||||
|
package.config = "\\\n;\n?\n!\n-\n"
|
||||||
|
else
|
||||||
|
package.config = "/\n;\n?\n!\n-\n"
|
||||||
|
end
|
||||||
|
|
||||||
package.preload["mp"] = function()
|
package.preload["mp"] = function()
|
||||||
return mp
|
return mp
|
||||||
@@ -246,6 +304,7 @@ local function run_plugin_scenario(config)
|
|||||||
end
|
end
|
||||||
|
|
||||||
local ok, err = pcall(dofile, "plugin/subminer/main.lua")
|
local ok, err = pcall(dofile, "plugin/subminer/main.lua")
|
||||||
|
package.config = original_package_config
|
||||||
if not ok then
|
if not ok then
|
||||||
return nil, err, recorded
|
return nil, err, recorded
|
||||||
end
|
end
|
||||||
@@ -412,6 +471,22 @@ local function fire_event(recorded, name)
|
|||||||
end
|
end
|
||||||
end
|
end
|
||||||
|
|
||||||
|
local function fire_observer(recorded, name, value)
|
||||||
|
local listeners = recorded.observers[name] or {}
|
||||||
|
for _, listener in ipairs(listeners) do
|
||||||
|
listener(name, value)
|
||||||
|
end
|
||||||
|
end
|
||||||
|
|
||||||
|
local function has_key_binding(recorded, keys, name)
|
||||||
|
for _, binding in ipairs(recorded.key_bindings or {}) do
|
||||||
|
if binding.keys == keys and binding.name == name then
|
||||||
|
return true
|
||||||
|
end
|
||||||
|
end
|
||||||
|
return false
|
||||||
|
end
|
||||||
|
|
||||||
local binary_path = "/tmp/subminer-binary"
|
local binary_path = "/tmp/subminer-binary"
|
||||||
|
|
||||||
do
|
do
|
||||||
@@ -516,6 +591,38 @@ do
|
|||||||
)
|
)
|
||||||
end
|
end
|
||||||
|
|
||||||
|
do
|
||||||
|
local recorded, err = run_plugin_scenario({
|
||||||
|
process_list = "",
|
||||||
|
option_overrides = {
|
||||||
|
binary_path = binary_path,
|
||||||
|
auto_start = "no",
|
||||||
|
},
|
||||||
|
media_title = "Sample Show S01E01",
|
||||||
|
time_pos = 13,
|
||||||
|
mal_lookup_stdout = "__MAL_FOUND__",
|
||||||
|
aniskip_stdout = "__ANISKIP_FOUND__",
|
||||||
|
files = {
|
||||||
|
[binary_path] = true,
|
||||||
|
},
|
||||||
|
})
|
||||||
|
assert_true(recorded ~= nil, "plugin failed to load for default AniSkip keybinding scenario: " .. tostring(err))
|
||||||
|
assert_true(
|
||||||
|
has_key_binding(recorded, "TAB", "subminer-skip-intro"),
|
||||||
|
"default AniSkip keybinding should register TAB"
|
||||||
|
)
|
||||||
|
assert_true(
|
||||||
|
not has_key_binding(recorded, "y-k", "subminer-skip-intro-fallback"),
|
||||||
|
"default AniSkip keybinding should not also register legacy y-k fallback"
|
||||||
|
)
|
||||||
|
recorded.script_messages["subminer-aniskip-refresh"]()
|
||||||
|
fire_observer(recorded, "time-pos", 13)
|
||||||
|
assert_true(
|
||||||
|
has_osd_message(recorded.osd, "You can skip by pressing TAB"),
|
||||||
|
"default AniSkip prompt should mention TAB"
|
||||||
|
)
|
||||||
|
end
|
||||||
|
|
||||||
do
|
do
|
||||||
local recorded, err = run_plugin_scenario({
|
local recorded, err = run_plugin_scenario({
|
||||||
process_list = "",
|
process_list = "",
|
||||||
|
|||||||
@@ -73,3 +73,10 @@ test('default keybindings include primary and secondary subtitle track cycling o
|
|||||||
assert.deepEqual(keybindingMap.get('KeyJ'), ['cycle', 'sid']);
|
assert.deepEqual(keybindingMap.get('KeyJ'), ['cycle', 'sid']);
|
||||||
assert.deepEqual(keybindingMap.get('Shift+KeyJ'), ['cycle', 'secondary-sid']);
|
assert.deepEqual(keybindingMap.get('Shift+KeyJ'), ['cycle', 'secondary-sid']);
|
||||||
});
|
});
|
||||||
|
|
||||||
|
test('default keybindings include fullscreen on F', () => {
|
||||||
|
const keybindingMap = new Map(
|
||||||
|
DEFAULT_KEYBINDINGS.map((binding) => [binding.key, binding.command]),
|
||||||
|
);
|
||||||
|
assert.deepEqual(keybindingMap.get('KeyF'), ['cycle', 'fullscreen']);
|
||||||
|
});
|
||||||
|
|||||||
@@ -50,6 +50,7 @@ export const SPECIAL_COMMANDS = {
|
|||||||
|
|
||||||
export const DEFAULT_KEYBINDINGS: NonNullable<ResolvedConfig['keybindings']> = [
|
export const DEFAULT_KEYBINDINGS: NonNullable<ResolvedConfig['keybindings']> = [
|
||||||
{ key: 'Space', command: ['cycle', 'pause'] },
|
{ key: 'Space', command: ['cycle', 'pause'] },
|
||||||
|
{ key: 'KeyF', command: ['cycle', 'fullscreen'] },
|
||||||
{ key: 'KeyJ', command: ['cycle', 'sid'] },
|
{ key: 'KeyJ', command: ['cycle', 'sid'] },
|
||||||
{ key: 'Shift+KeyJ', command: ['cycle', 'secondary-sid'] },
|
{ key: 'Shift+KeyJ', command: ['cycle', 'secondary-sid'] },
|
||||||
{ key: 'ArrowRight', command: ['seek', 5] },
|
{ key: 'ArrowRight', command: ['seek', 5] },
|
||||||
|
|||||||
@@ -34,6 +34,44 @@ test('guessAnilistMediaInfo falls back to parser when guessit fails', async () =
|
|||||||
});
|
});
|
||||||
});
|
});
|
||||||
|
|
||||||
|
test('guessAnilistMediaInfo uses basename for guessit input', async () => {
|
||||||
|
const mediaPath =
|
||||||
|
'/truenas/jellyfin/anime/Rascal-Does-not-Dream-of-Bunny-Girl-Senapi/Season-1/Rascal Does Not Dream of Bunny Girl Senpai (2018) - S01E01 - 001 - My Senpai Is a Bunny Girl [Bluray-1080p][10bit][x265][Opus 2.0][JA]-Subs.mkv';
|
||||||
|
const seenTargets: string[] = [];
|
||||||
|
const result = await guessAnilistMediaInfo(mediaPath, null, {
|
||||||
|
runGuessit: async (target) => {
|
||||||
|
seenTargets.push(target);
|
||||||
|
return JSON.stringify({
|
||||||
|
title: 'Rascal Does Not Dream of Bunny Girl Senpai',
|
||||||
|
episode: 1,
|
||||||
|
});
|
||||||
|
},
|
||||||
|
});
|
||||||
|
assert.deepEqual(seenTargets, [
|
||||||
|
'Rascal Does Not Dream of Bunny Girl Senpai (2018) - S01E01 - 001 - My Senpai Is a Bunny Girl [Bluray-1080p][10bit][x265][Opus 2.0][JA]-Subs.mkv',
|
||||||
|
]);
|
||||||
|
assert.deepEqual(result, {
|
||||||
|
title: 'Rascal Does Not Dream of Bunny Girl Senpai',
|
||||||
|
episode: 1,
|
||||||
|
source: 'guessit',
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
test('guessAnilistMediaInfo joins multi-part guessit titles', async () => {
|
||||||
|
const result = await guessAnilistMediaInfo('/tmp/demo.mkv', null, {
|
||||||
|
runGuessit: async () =>
|
||||||
|
JSON.stringify({
|
||||||
|
title: ['Rascal', 'Does-not-Dream-of-Bunny-Girl-Senpai'],
|
||||||
|
episode: 1,
|
||||||
|
}),
|
||||||
|
});
|
||||||
|
assert.deepEqual(result, {
|
||||||
|
title: 'Rascal Does not Dream of Bunny Girl Senpai',
|
||||||
|
episode: 1,
|
||||||
|
source: 'guessit',
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
test('updateAnilistPostWatchProgress updates progress when behind', async () => {
|
test('updateAnilistPostWatchProgress updates progress when behind', async () => {
|
||||||
const originalFetch = globalThis.fetch;
|
const originalFetch = globalThis.fetch;
|
||||||
let call = 0;
|
let call = 0;
|
||||||
|
|||||||
@@ -1,4 +1,5 @@
|
|||||||
import * as childProcess from 'child_process';
|
import * as childProcess from 'child_process';
|
||||||
|
import * as path from 'path';
|
||||||
|
|
||||||
import { parseMediaInfo } from '../../../jimaku/utils';
|
import { parseMediaInfo } from '../../../jimaku/utils';
|
||||||
|
|
||||||
@@ -90,6 +91,32 @@ function firstString(value: unknown): string | null {
|
|||||||
return null;
|
return null;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
function normalizeGuessitTitlePart(value: string): string {
|
||||||
|
return value
|
||||||
|
.replace(/[._]+/g, ' ')
|
||||||
|
.replace(/-/g, ' ')
|
||||||
|
.replace(/\s+/g, ' ')
|
||||||
|
.trim();
|
||||||
|
}
|
||||||
|
|
||||||
|
function readGuessitTitle(value: unknown): string | null {
|
||||||
|
if (typeof value === 'string') {
|
||||||
|
const normalized = normalizeGuessitTitlePart(value);
|
||||||
|
return normalized.length > 0 ? normalized : null;
|
||||||
|
}
|
||||||
|
if (Array.isArray(value)) {
|
||||||
|
const parts = value
|
||||||
|
.filter((item): item is string => typeof item === 'string')
|
||||||
|
.map((item) => normalizeGuessitTitlePart(item))
|
||||||
|
.filter((item) => item.length > 0);
|
||||||
|
if (parts.length === 0) {
|
||||||
|
return null;
|
||||||
|
}
|
||||||
|
return parts.join(' ').replace(/\s+/g, ' ').trim();
|
||||||
|
}
|
||||||
|
return null;
|
||||||
|
}
|
||||||
|
|
||||||
function firstPositiveInteger(value: unknown): number | null {
|
function firstPositiveInteger(value: unknown): number | null {
|
||||||
if (typeof value === 'number' && Number.isInteger(value) && value > 0) {
|
if (typeof value === 'number' && Number.isInteger(value) && value > 0) {
|
||||||
return value;
|
return value;
|
||||||
@@ -184,12 +211,13 @@ export async function guessAnilistMediaInfo(
|
|||||||
deps: GuessAnilistMediaInfoDeps = { runGuessit },
|
deps: GuessAnilistMediaInfoDeps = { runGuessit },
|
||||||
): Promise<AnilistMediaGuess | null> {
|
): Promise<AnilistMediaGuess | null> {
|
||||||
const target = mediaPath ?? mediaTitle;
|
const target = mediaPath ?? mediaTitle;
|
||||||
|
const guessitTarget = mediaPath ? path.basename(mediaPath) : mediaTitle;
|
||||||
|
|
||||||
if (target && target.trim().length > 0) {
|
if (guessitTarget && guessitTarget.trim().length > 0) {
|
||||||
try {
|
try {
|
||||||
const stdout = await deps.runGuessit(target);
|
const stdout = await deps.runGuessit(guessitTarget);
|
||||||
const parsed = JSON.parse(stdout) as Record<string, unknown>;
|
const parsed = JSON.parse(stdout) as Record<string, unknown>;
|
||||||
const title = firstString(parsed.title);
|
const title = readGuessitTitle(parsed.title);
|
||||||
const episode = firstPositiveInteger(parsed.episode);
|
const episode = firstPositiveInteger(parsed.episode);
|
||||||
if (title) {
|
if (title) {
|
||||||
return { title, episode, source: 'guessit' };
|
return { title, episode, source: 'guessit' };
|
||||||
|
|||||||
@@ -72,6 +72,10 @@ export {
|
|||||||
syncOverlayWindowLayer,
|
syncOverlayWindowLayer,
|
||||||
updateOverlayWindowBounds,
|
updateOverlayWindowBounds,
|
||||||
} from './overlay-window';
|
} from './overlay-window';
|
||||||
|
export {
|
||||||
|
handleOverlayWindowBeforeInputEvent,
|
||||||
|
isTabInputForMpvForwarding,
|
||||||
|
} from './overlay-window-input';
|
||||||
export { initializeOverlayRuntime } from './overlay-runtime-init';
|
export { initializeOverlayRuntime } from './overlay-runtime-init';
|
||||||
export { setVisibleOverlayVisible, updateVisibleOverlayVisibility } from './overlay-visibility';
|
export { setVisibleOverlayVisible, updateVisibleOverlayVisibility } from './overlay-visibility';
|
||||||
export {
|
export {
|
||||||
|
|||||||
@@ -77,7 +77,7 @@ test('macOS keeps visible overlay hidden while tracker is not ready and emits on
|
|||||||
assert.ok(!calls.includes('show'));
|
assert.ok(!calls.includes('show'));
|
||||||
});
|
});
|
||||||
|
|
||||||
test('non-macOS keeps fallback visible overlay behavior when tracker is not ready', () => {
|
test('tracked non-macOS overlay stays hidden while tracker is not ready', () => {
|
||||||
const { window, calls } = createMainWindowRecorder();
|
const { window, calls } = createMainWindowRecorder();
|
||||||
let trackerWarning = false;
|
let trackerWarning = false;
|
||||||
const tracker: WindowTrackerStub = {
|
const tracker: WindowTrackerStub = {
|
||||||
@@ -116,7 +116,48 @@ test('non-macOS keeps fallback visible overlay behavior when tracker is not read
|
|||||||
} as never);
|
} as never);
|
||||||
|
|
||||||
assert.equal(trackerWarning, true);
|
assert.equal(trackerWarning, true);
|
||||||
assert.ok(calls.includes('update-bounds'));
|
assert.ok(calls.includes('hide'));
|
||||||
|
assert.ok(!calls.includes('update-bounds'));
|
||||||
|
assert.ok(!calls.includes('show'));
|
||||||
|
assert.ok(!calls.includes('focus'));
|
||||||
|
assert.ok(!calls.includes('osd'));
|
||||||
|
});
|
||||||
|
|
||||||
|
test('untracked non-macOS overlay keeps fallback visible behavior when no tracker exists', () => {
|
||||||
|
const { window, calls } = createMainWindowRecorder();
|
||||||
|
let trackerWarning = false;
|
||||||
|
|
||||||
|
updateVisibleOverlayVisibility({
|
||||||
|
visibleOverlayVisible: true,
|
||||||
|
mainWindow: window as never,
|
||||||
|
windowTracker: null,
|
||||||
|
trackerNotReadyWarningShown: trackerWarning,
|
||||||
|
setTrackerNotReadyWarningShown: (shown: boolean) => {
|
||||||
|
trackerWarning = shown;
|
||||||
|
},
|
||||||
|
updateVisibleOverlayBounds: () => {
|
||||||
|
calls.push('update-bounds');
|
||||||
|
},
|
||||||
|
ensureOverlayWindowLevel: () => {
|
||||||
|
calls.push('ensure-level');
|
||||||
|
},
|
||||||
|
syncPrimaryOverlayWindowLayer: () => {
|
||||||
|
calls.push('sync-layer');
|
||||||
|
},
|
||||||
|
enforceOverlayLayerOrder: () => {
|
||||||
|
calls.push('enforce-order');
|
||||||
|
},
|
||||||
|
syncOverlayShortcuts: () => {
|
||||||
|
calls.push('sync-shortcuts');
|
||||||
|
},
|
||||||
|
isMacOSPlatform: false,
|
||||||
|
showOverlayLoadingOsd: () => {
|
||||||
|
calls.push('osd');
|
||||||
|
},
|
||||||
|
resolveFallbackBounds: () => ({ x: 12, y: 24, width: 640, height: 360 }),
|
||||||
|
} as never);
|
||||||
|
|
||||||
|
assert.equal(trackerWarning, false);
|
||||||
assert.ok(calls.includes('show'));
|
assert.ok(calls.includes('show'));
|
||||||
assert.ok(calls.includes('focus'));
|
assert.ok(calls.includes('focus'));
|
||||||
assert.ok(!calls.includes('osd'));
|
assert.ok(!calls.includes('osd'));
|
||||||
|
|||||||
@@ -84,20 +84,8 @@ export function updateVisibleOverlayVisibility(args: {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
if (args.isMacOSPlatform || args.isWindowsPlatform) {
|
|
||||||
mainWindow.hide();
|
mainWindow.hide();
|
||||||
args.syncOverlayShortcuts();
|
args.syncOverlayShortcuts();
|
||||||
return;
|
|
||||||
}
|
|
||||||
|
|
||||||
const fallbackBounds = args.resolveFallbackBounds?.();
|
|
||||||
if (!fallbackBounds) return;
|
|
||||||
|
|
||||||
args.updateVisibleOverlayBounds(fallbackBounds);
|
|
||||||
args.syncPrimaryOverlayWindowLayer('visible');
|
|
||||||
showPassiveVisibleOverlay();
|
|
||||||
args.enforceOverlayLayerOrder();
|
|
||||||
args.syncOverlayShortcuts();
|
|
||||||
}
|
}
|
||||||
|
|
||||||
export function setVisibleOverlayVisible(options: {
|
export function setVisibleOverlayVisible(options: {
|
||||||
|
|||||||
61
src/core/services/overlay-window-input.ts
Normal file
61
src/core/services/overlay-window-input.ts
Normal file
@@ -0,0 +1,61 @@
|
|||||||
|
export type OverlayWindowKind = 'visible' | 'modal';
|
||||||
|
|
||||||
|
export function isTabInputForMpvForwarding(input: Electron.Input): boolean {
|
||||||
|
if (input.type !== 'keyDown' || input.isAutoRepeat) return false;
|
||||||
|
if (input.alt || input.control || input.meta || input.shift) return false;
|
||||||
|
return input.code === 'Tab' || input.key === 'Tab';
|
||||||
|
}
|
||||||
|
|
||||||
|
function isLookupWindowToggleInput(input: Electron.Input): boolean {
|
||||||
|
if (input.type !== 'keyDown') return false;
|
||||||
|
if (input.alt) return false;
|
||||||
|
if (!input.control && !input.meta) return false;
|
||||||
|
if (input.shift) return false;
|
||||||
|
const normalizedKey = typeof input.key === 'string' ? input.key.toLowerCase() : '';
|
||||||
|
return input.code === 'KeyY' || normalizedKey === 'y';
|
||||||
|
}
|
||||||
|
|
||||||
|
function isKeyboardModeToggleInput(input: Electron.Input): boolean {
|
||||||
|
if (input.type !== 'keyDown') return false;
|
||||||
|
if (input.alt) return false;
|
||||||
|
if (!input.control && !input.meta) return false;
|
||||||
|
if (!input.shift) return false;
|
||||||
|
const normalizedKey = typeof input.key === 'string' ? input.key.toLowerCase() : '';
|
||||||
|
return input.code === 'KeyY' || normalizedKey === 'y';
|
||||||
|
}
|
||||||
|
|
||||||
|
export function handleOverlayWindowBeforeInputEvent(options: {
|
||||||
|
kind: OverlayWindowKind;
|
||||||
|
windowVisible: boolean;
|
||||||
|
input: Electron.Input;
|
||||||
|
preventDefault: () => void;
|
||||||
|
sendKeyboardModeToggleRequested: () => void;
|
||||||
|
sendLookupWindowToggleRequested: () => void;
|
||||||
|
tryHandleOverlayShortcutLocalFallback: (input: Electron.Input) => boolean;
|
||||||
|
forwardTabToMpv: () => void;
|
||||||
|
}): boolean {
|
||||||
|
if (options.kind === 'modal') return false;
|
||||||
|
if (!options.windowVisible) return false;
|
||||||
|
|
||||||
|
if (isKeyboardModeToggleInput(options.input)) {
|
||||||
|
options.preventDefault();
|
||||||
|
options.sendKeyboardModeToggleRequested();
|
||||||
|
return true;
|
||||||
|
}
|
||||||
|
|
||||||
|
if (isLookupWindowToggleInput(options.input)) {
|
||||||
|
options.preventDefault();
|
||||||
|
options.sendLookupWindowToggleRequested();
|
||||||
|
return true;
|
||||||
|
}
|
||||||
|
|
||||||
|
if (isTabInputForMpvForwarding(options.input)) {
|
||||||
|
options.preventDefault();
|
||||||
|
options.forwardTabToMpv();
|
||||||
|
return true;
|
||||||
|
}
|
||||||
|
|
||||||
|
if (!options.tryHandleOverlayShortcutLocalFallback(options.input)) return false;
|
||||||
|
options.preventDefault();
|
||||||
|
return true;
|
||||||
|
}
|
||||||
84
src/core/services/overlay-window.test.ts
Normal file
84
src/core/services/overlay-window.test.ts
Normal file
@@ -0,0 +1,84 @@
|
|||||||
|
import assert from 'node:assert/strict';
|
||||||
|
import test from 'node:test';
|
||||||
|
import {
|
||||||
|
handleOverlayWindowBeforeInputEvent,
|
||||||
|
isTabInputForMpvForwarding,
|
||||||
|
} from './overlay-window-input';
|
||||||
|
|
||||||
|
test('isTabInputForMpvForwarding matches bare Tab keydown only', () => {
|
||||||
|
assert.equal(
|
||||||
|
isTabInputForMpvForwarding({
|
||||||
|
type: 'keyDown',
|
||||||
|
key: 'Tab',
|
||||||
|
code: 'Tab',
|
||||||
|
} as Electron.Input),
|
||||||
|
true,
|
||||||
|
);
|
||||||
|
assert.equal(
|
||||||
|
isTabInputForMpvForwarding({
|
||||||
|
type: 'keyDown',
|
||||||
|
key: 'Tab',
|
||||||
|
code: 'Tab',
|
||||||
|
shift: true,
|
||||||
|
} as Electron.Input),
|
||||||
|
false,
|
||||||
|
);
|
||||||
|
assert.equal(
|
||||||
|
isTabInputForMpvForwarding({
|
||||||
|
type: 'keyUp',
|
||||||
|
key: 'Tab',
|
||||||
|
code: 'Tab',
|
||||||
|
} as Electron.Input),
|
||||||
|
false,
|
||||||
|
);
|
||||||
|
});
|
||||||
|
|
||||||
|
test('handleOverlayWindowBeforeInputEvent forwards Tab to mpv for visible overlays', () => {
|
||||||
|
const calls: string[] = [];
|
||||||
|
|
||||||
|
const handled = handleOverlayWindowBeforeInputEvent({
|
||||||
|
kind: 'visible',
|
||||||
|
windowVisible: true,
|
||||||
|
input: {
|
||||||
|
type: 'keyDown',
|
||||||
|
key: 'Tab',
|
||||||
|
code: 'Tab',
|
||||||
|
} as Electron.Input,
|
||||||
|
preventDefault: () => calls.push('prevent-default'),
|
||||||
|
sendKeyboardModeToggleRequested: () => calls.push('keyboard-mode'),
|
||||||
|
sendLookupWindowToggleRequested: () => calls.push('lookup-toggle'),
|
||||||
|
tryHandleOverlayShortcutLocalFallback: () => {
|
||||||
|
calls.push('fallback');
|
||||||
|
return false;
|
||||||
|
},
|
||||||
|
forwardTabToMpv: () => calls.push('forward-tab'),
|
||||||
|
});
|
||||||
|
|
||||||
|
assert.equal(handled, true);
|
||||||
|
assert.deepEqual(calls, ['prevent-default', 'forward-tab']);
|
||||||
|
});
|
||||||
|
|
||||||
|
test('handleOverlayWindowBeforeInputEvent leaves modal Tab handling alone', () => {
|
||||||
|
const calls: string[] = [];
|
||||||
|
|
||||||
|
const handled = handleOverlayWindowBeforeInputEvent({
|
||||||
|
kind: 'modal',
|
||||||
|
windowVisible: true,
|
||||||
|
input: {
|
||||||
|
type: 'keyDown',
|
||||||
|
key: 'Tab',
|
||||||
|
code: 'Tab',
|
||||||
|
} as Electron.Input,
|
||||||
|
preventDefault: () => calls.push('prevent-default'),
|
||||||
|
sendKeyboardModeToggleRequested: () => calls.push('keyboard-mode'),
|
||||||
|
sendLookupWindowToggleRequested: () => calls.push('lookup-toggle'),
|
||||||
|
tryHandleOverlayShortcutLocalFallback: () => {
|
||||||
|
calls.push('fallback');
|
||||||
|
return false;
|
||||||
|
},
|
||||||
|
forwardTabToMpv: () => calls.push('forward-tab'),
|
||||||
|
});
|
||||||
|
|
||||||
|
assert.equal(handled, false);
|
||||||
|
assert.deepEqual(calls, []);
|
||||||
|
});
|
||||||
@@ -3,6 +3,10 @@ import * as path from 'path';
|
|||||||
import { WindowGeometry } from '../../types';
|
import { WindowGeometry } from '../../types';
|
||||||
import { createLogger } from '../../logger';
|
import { createLogger } from '../../logger';
|
||||||
import { IPC_CHANNELS } from '../../shared/ipc/contracts';
|
import { IPC_CHANNELS } from '../../shared/ipc/contracts';
|
||||||
|
import {
|
||||||
|
handleOverlayWindowBeforeInputEvent,
|
||||||
|
type OverlayWindowKind,
|
||||||
|
} from './overlay-window-input';
|
||||||
|
|
||||||
const logger = createLogger('main:overlay-window');
|
const logger = createLogger('main:overlay-window');
|
||||||
const overlayWindowLayerByInstance = new WeakMap<BrowserWindow, OverlayWindowKind>();
|
const overlayWindowLayerByInstance = new WeakMap<BrowserWindow, OverlayWindowKind>();
|
||||||
@@ -23,26 +27,6 @@ function loadOverlayWindowLayer(window: BrowserWindow, layer: OverlayWindowKind)
|
|||||||
});
|
});
|
||||||
}
|
}
|
||||||
|
|
||||||
export type OverlayWindowKind = 'visible' | 'modal';
|
|
||||||
|
|
||||||
function isLookupWindowToggleInput(input: Electron.Input): boolean {
|
|
||||||
if (input.type !== 'keyDown') return false;
|
|
||||||
if (input.alt) return false;
|
|
||||||
if (!input.control && !input.meta) return false;
|
|
||||||
if (input.shift) return false;
|
|
||||||
const normalizedKey = typeof input.key === 'string' ? input.key.toLowerCase() : '';
|
|
||||||
return input.code === 'KeyY' || normalizedKey === 'y';
|
|
||||||
}
|
|
||||||
|
|
||||||
function isKeyboardModeToggleInput(input: Electron.Input): boolean {
|
|
||||||
if (input.type !== 'keyDown') return false;
|
|
||||||
if (input.alt) return false;
|
|
||||||
if (!input.control && !input.meta) return false;
|
|
||||||
if (!input.shift) return false;
|
|
||||||
const normalizedKey = typeof input.key === 'string' ? input.key.toLowerCase() : '';
|
|
||||||
return input.code === 'KeyY' || normalizedKey === 'y';
|
|
||||||
}
|
|
||||||
|
|
||||||
export function updateOverlayWindowBounds(
|
export function updateOverlayWindowBounds(
|
||||||
geometry: WindowGeometry,
|
geometry: WindowGeometry,
|
||||||
window: BrowserWindow | null,
|
window: BrowserWindow | null,
|
||||||
@@ -92,6 +76,7 @@ export function createOverlayWindow(
|
|||||||
setOverlayDebugVisualizationEnabled: (enabled: boolean) => void;
|
setOverlayDebugVisualizationEnabled: (enabled: boolean) => void;
|
||||||
isOverlayVisible: (kind: OverlayWindowKind) => boolean;
|
isOverlayVisible: (kind: OverlayWindowKind) => boolean;
|
||||||
tryHandleOverlayShortcutLocalFallback: (input: Electron.Input) => boolean;
|
tryHandleOverlayShortcutLocalFallback: (input: Electron.Input) => boolean;
|
||||||
|
forwardTabToMpv: () => void;
|
||||||
onWindowClosed: (kind: OverlayWindowKind) => void;
|
onWindowClosed: (kind: OverlayWindowKind) => void;
|
||||||
},
|
},
|
||||||
): BrowserWindow {
|
): BrowserWindow {
|
||||||
@@ -142,20 +127,19 @@ export function createOverlayWindow(
|
|||||||
}
|
}
|
||||||
|
|
||||||
window.webContents.on('before-input-event', (event, input) => {
|
window.webContents.on('before-input-event', (event, input) => {
|
||||||
if (kind === 'modal') return;
|
handleOverlayWindowBeforeInputEvent({
|
||||||
if (!window.isVisible()) return;
|
kind,
|
||||||
if (isKeyboardModeToggleInput(input)) {
|
windowVisible: window.isVisible(),
|
||||||
event.preventDefault();
|
input,
|
||||||
window.webContents.send(IPC_CHANNELS.event.keyboardModeToggleRequested);
|
preventDefault: () => event.preventDefault(),
|
||||||
return;
|
sendKeyboardModeToggleRequested: () =>
|
||||||
}
|
window.webContents.send(IPC_CHANNELS.event.keyboardModeToggleRequested),
|
||||||
if (isLookupWindowToggleInput(input)) {
|
sendLookupWindowToggleRequested: () =>
|
||||||
event.preventDefault();
|
window.webContents.send(IPC_CHANNELS.event.lookupWindowToggleRequested),
|
||||||
window.webContents.send(IPC_CHANNELS.event.lookupWindowToggleRequested);
|
tryHandleOverlayShortcutLocalFallback: (nextInput) =>
|
||||||
return;
|
options.tryHandleOverlayShortcutLocalFallback(nextInput),
|
||||||
}
|
forwardTabToMpv: () => options.forwardTabToMpv(),
|
||||||
if (!options.tryHandleOverlayShortcutLocalFallback(input)) return;
|
});
|
||||||
event.preventDefault();
|
|
||||||
});
|
});
|
||||||
|
|
||||||
window.hide();
|
window.hide();
|
||||||
@@ -185,3 +169,5 @@ export function syncOverlayWindowLayer(window: BrowserWindow, layer: 'visible'):
|
|||||||
if (overlayWindowLayerByInstance.get(window) === layer) return;
|
if (overlayWindowLayerByInstance.get(window) === layer) return;
|
||||||
loadOverlayWindowLayer(window, layer);
|
loadOverlayWindowLayer(window, layer);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
export type { OverlayWindowKind } from './overlay-window-input';
|
||||||
|
|||||||
42
src/main.ts
42
src/main.ts
@@ -372,6 +372,9 @@ import { createMediaRuntimeService } from './main/media-runtime';
|
|||||||
import { createOverlayVisibilityRuntimeService } from './main/overlay-visibility-runtime';
|
import { createOverlayVisibilityRuntimeService } from './main/overlay-visibility-runtime';
|
||||||
import { createCharacterDictionaryRuntimeService } from './main/character-dictionary-runtime';
|
import { createCharacterDictionaryRuntimeService } from './main/character-dictionary-runtime';
|
||||||
import { createCharacterDictionaryAutoSyncRuntimeService } from './main/runtime/character-dictionary-auto-sync';
|
import { createCharacterDictionaryAutoSyncRuntimeService } from './main/runtime/character-dictionary-auto-sync';
|
||||||
|
import { notifyCharacterDictionaryAutoSyncStatus } from './main/runtime/character-dictionary-auto-sync-notifications';
|
||||||
|
import { createCurrentMediaTokenizationGate } from './main/runtime/current-media-tokenization-gate';
|
||||||
|
import { createStartupOsdSequencer } from './main/runtime/startup-osd-sequencer';
|
||||||
import {
|
import {
|
||||||
getPreferredYomitanAnkiServerUrl as getPreferredYomitanAnkiServerUrlRuntime,
|
getPreferredYomitanAnkiServerUrl as getPreferredYomitanAnkiServerUrlRuntime,
|
||||||
shouldForceOverrideYomitanAnkiServer,
|
shouldForceOverrideYomitanAnkiServer,
|
||||||
@@ -913,6 +916,10 @@ const configDerivedRuntime = createConfigDerivedRuntime(buildConfigDerivedRuntim
|
|||||||
const subsyncRuntime = createMainSubsyncRuntime(buildMainSubsyncRuntimeMainDepsHandler());
|
const subsyncRuntime = createMainSubsyncRuntime(buildMainSubsyncRuntimeMainDepsHandler());
|
||||||
let autoPlayReadySignalMediaPath: string | null = null;
|
let autoPlayReadySignalMediaPath: string | null = null;
|
||||||
let autoPlayReadySignalGeneration = 0;
|
let autoPlayReadySignalGeneration = 0;
|
||||||
|
const currentMediaTokenizationGate = createCurrentMediaTokenizationGate();
|
||||||
|
const startupOsdSequencer = createStartupOsdSequencer({
|
||||||
|
showOsd: (message) => showMpvOsd(message),
|
||||||
|
});
|
||||||
|
|
||||||
function maybeSignalPluginAutoplayReady(
|
function maybeSignalPluginAutoplayReady(
|
||||||
payload: SubtitleData,
|
payload: SubtitleData,
|
||||||
@@ -1324,8 +1331,13 @@ const characterDictionaryAutoSyncRuntime = createCharacterDictionaryAutoSyncRunt
|
|||||||
profileScope: config.profileScope,
|
profileScope: config.profileScope,
|
||||||
};
|
};
|
||||||
},
|
},
|
||||||
getOrCreateCurrentSnapshot: () => characterDictionaryRuntime.getOrCreateCurrentSnapshot(),
|
getOrCreateCurrentSnapshot: (targetPath, progress) =>
|
||||||
|
characterDictionaryRuntime.getOrCreateCurrentSnapshot(targetPath, progress),
|
||||||
buildMergedDictionary: (mediaIds) => characterDictionaryRuntime.buildMergedDictionary(mediaIds),
|
buildMergedDictionary: (mediaIds) => characterDictionaryRuntime.buildMergedDictionary(mediaIds),
|
||||||
|
waitForYomitanMutationReady: () =>
|
||||||
|
currentMediaTokenizationGate.waitUntilReady(
|
||||||
|
appState.currentMediaPath?.trim() || appState.mpvClient?.currentVideoPath?.trim() || null,
|
||||||
|
),
|
||||||
getYomitanDictionaryInfo: async () => {
|
getYomitanDictionaryInfo: async () => {
|
||||||
await ensureYomitanExtensionLoaded();
|
await ensureYomitanExtensionLoaded();
|
||||||
return await getYomitanDictionaryInfo(getYomitanParserRuntimeDeps(), {
|
return await getYomitanDictionaryInfo(getYomitanParserRuntimeDeps(), {
|
||||||
@@ -1364,6 +1376,24 @@ const characterDictionaryAutoSyncRuntime = createCharacterDictionaryAutoSyncRunt
|
|||||||
clearSchedule: (timer) => clearTimeout(timer),
|
clearSchedule: (timer) => clearTimeout(timer),
|
||||||
logInfo: (message) => logger.info(message),
|
logInfo: (message) => logger.info(message),
|
||||||
logWarn: (message) => logger.warn(message),
|
logWarn: (message) => logger.warn(message),
|
||||||
|
onSyncStatus: (event) => {
|
||||||
|
notifyCharacterDictionaryAutoSyncStatus(event, {
|
||||||
|
getNotificationType: () => getResolvedConfig().ankiConnect.behavior.notificationType,
|
||||||
|
showOsd: (message) => showMpvOsd(message),
|
||||||
|
showDesktopNotification: (title, options) => showDesktopNotification(title, options),
|
||||||
|
startupOsdSequencer,
|
||||||
|
});
|
||||||
|
},
|
||||||
|
onSyncComplete: ({ mediaId, mediaTitle, changed }) => {
|
||||||
|
if (appState.yomitanParserWindow) {
|
||||||
|
clearYomitanParserCachesForWindow(appState.yomitanParserWindow);
|
||||||
|
}
|
||||||
|
subtitleProcessingController.invalidateTokenizationCache();
|
||||||
|
subtitleProcessingController.refreshCurrentSubtitle(appState.currentSubText);
|
||||||
|
logger.info(
|
||||||
|
`[dictionary:auto-sync] refreshed current subtitle after sync (AniList ${mediaId}, changed=${changed ? 'yes' : 'no'}, title=${mediaTitle})`,
|
||||||
|
);
|
||||||
|
},
|
||||||
});
|
});
|
||||||
|
|
||||||
const overlayVisibilityRuntime = createOverlayVisibilityRuntimeService(
|
const overlayVisibilityRuntime = createOverlayVisibilityRuntimeService(
|
||||||
@@ -2673,6 +2703,8 @@ const {
|
|||||||
},
|
},
|
||||||
updateCurrentMediaPath: (path) => {
|
updateCurrentMediaPath: (path) => {
|
||||||
autoPlayReadySignalMediaPath = null;
|
autoPlayReadySignalMediaPath = null;
|
||||||
|
currentMediaTokenizationGate.updateCurrentMediaPath(path);
|
||||||
|
startupOsdSequencer.reset();
|
||||||
if (path) {
|
if (path) {
|
||||||
ensureImmersionTrackerStarted();
|
ensureImmersionTrackerStarted();
|
||||||
}
|
}
|
||||||
@@ -2793,6 +2825,10 @@ const {
|
|||||||
getYomitanGroupDebugEnabled: () => appState.overlayDebugVisualizationEnabled,
|
getYomitanGroupDebugEnabled: () => appState.overlayDebugVisualizationEnabled,
|
||||||
getMecabTokenizer: () => appState.mecabTokenizer,
|
getMecabTokenizer: () => appState.mecabTokenizer,
|
||||||
onTokenizationReady: (text) => {
|
onTokenizationReady: (text) => {
|
||||||
|
currentMediaTokenizationGate.markReady(
|
||||||
|
appState.currentMediaPath?.trim() || appState.mpvClient?.currentVideoPath?.trim() || null,
|
||||||
|
);
|
||||||
|
startupOsdSequencer.markTokenizationReady();
|
||||||
maybeSignalPluginAutoplayReady({ text, tokens: null }, { forceWhilePaused: true });
|
maybeSignalPluginAutoplayReady({ text, tokens: null }, { forceWhilePaused: true });
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
@@ -2812,6 +2848,9 @@ const {
|
|||||||
ensureFrequencyDictionaryLookup: () =>
|
ensureFrequencyDictionaryLookup: () =>
|
||||||
frequencyDictionaryRuntime.ensureFrequencyDictionaryLookup(),
|
frequencyDictionaryRuntime.ensureFrequencyDictionaryLookup(),
|
||||||
showMpvOsd: (message: string) => showMpvOsd(message),
|
showMpvOsd: (message: string) => showMpvOsd(message),
|
||||||
|
showLoadingOsd: (message: string) => startupOsdSequencer.showAnnotationLoading(message),
|
||||||
|
showLoadedOsd: (message: string) =>
|
||||||
|
startupOsdSequencer.markAnnotationLoadingComplete(message),
|
||||||
shouldShowOsdNotification: () => {
|
shouldShowOsdNotification: () => {
|
||||||
const type = getResolvedConfig().ankiConnect.behavior.notificationType;
|
const type = getResolvedConfig().ankiConnect.behavior.notificationType;
|
||||||
return type === 'osd' || type === 'both';
|
return type === 'osd' || type === 'both';
|
||||||
@@ -3475,6 +3514,7 @@ const { createMainWindow: createMainWindowHandler, createModalWindow: createModa
|
|||||||
windowKind === 'visible' ? overlayManager.getVisibleOverlayVisible() : false,
|
windowKind === 'visible' ? overlayManager.getVisibleOverlayVisible() : false,
|
||||||
tryHandleOverlayShortcutLocalFallback: (input) =>
|
tryHandleOverlayShortcutLocalFallback: (input) =>
|
||||||
overlayShortcutsRuntime.tryHandleOverlayShortcutLocalFallback(input),
|
overlayShortcutsRuntime.tryHandleOverlayShortcutLocalFallback(input),
|
||||||
|
forwardTabToMpv: () => sendMpvCommandRuntime(appState.mpvClient, ['keypress', 'TAB']),
|
||||||
onWindowClosed: (windowKind) => {
|
onWindowClosed: (windowKind) => {
|
||||||
if (windowKind === 'visible') {
|
if (windowKind === 'visible') {
|
||||||
overlayManager.setMainWindow(null);
|
overlayManager.setMainWindow(null);
|
||||||
|
|||||||
@@ -213,7 +213,7 @@ test('generateForCurrentMedia emits structured-content glossary so image stays w
|
|||||||
assert.equal(roleBadgeDiv.tag, 'div');
|
assert.equal(roleBadgeDiv.tag, 'div');
|
||||||
const badge = roleBadgeDiv.content as { tag: string; content: string };
|
const badge = roleBadgeDiv.content as { tag: string; content: string };
|
||||||
assert.equal(badge.tag, 'span');
|
assert.equal(badge.tag, 'span');
|
||||||
assert.equal(badge.content, 'Side Character');
|
assert.equal(badge.content, 'Main Character');
|
||||||
|
|
||||||
const descSection = children.find(
|
const descSection = children.find(
|
||||||
(c) =>
|
(c) =>
|
||||||
@@ -695,6 +695,128 @@ test('generateForCurrentMedia adds kana aliases for romanized names when native
|
|||||||
}
|
}
|
||||||
});
|
});
|
||||||
|
|
||||||
|
test('generateForCurrentMedia indexes kanji family and given names using AniList first and last hints', async () => {
|
||||||
|
const userDataPath = makeTempDir();
|
||||||
|
const originalFetch = globalThis.fetch;
|
||||||
|
|
||||||
|
globalThis.fetch = (async (input: string | URL | Request, init?: RequestInit) => {
|
||||||
|
const url = typeof input === 'string' ? input : input instanceof URL ? input.href : input.url;
|
||||||
|
if (url === GRAPHQL_URL) {
|
||||||
|
const body = JSON.parse(String(init?.body ?? '{}')) as {
|
||||||
|
query?: string;
|
||||||
|
};
|
||||||
|
|
||||||
|
if (body.query?.includes('Page(perPage: 10)')) {
|
||||||
|
return new Response(
|
||||||
|
JSON.stringify({
|
||||||
|
data: {
|
||||||
|
Page: {
|
||||||
|
media: [
|
||||||
|
{
|
||||||
|
id: 37450,
|
||||||
|
episodes: 13,
|
||||||
|
title: {
|
||||||
|
romaji: 'Seishun Buta Yarou wa Bunny Girl Senpai no Yume wo Minai',
|
||||||
|
english: 'Rascal Does Not Dream of Bunny Girl Senpai',
|
||||||
|
native: '青春ブタ野郎はバニーガール先輩の夢を見ない',
|
||||||
|
},
|
||||||
|
},
|
||||||
|
],
|
||||||
|
},
|
||||||
|
},
|
||||||
|
}),
|
||||||
|
{
|
||||||
|
status: 200,
|
||||||
|
headers: { 'content-type': 'application/json' },
|
||||||
|
},
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
if (body.query?.includes('characters(page: $page')) {
|
||||||
|
return new Response(
|
||||||
|
JSON.stringify({
|
||||||
|
data: {
|
||||||
|
Media: {
|
||||||
|
title: {
|
||||||
|
romaji: 'Seishun Buta Yarou wa Bunny Girl Senpai no Yume wo Minai',
|
||||||
|
english: 'Rascal Does Not Dream of Bunny Girl Senpai',
|
||||||
|
native: '青春ブタ野郎はバニーガール先輩の夢を見ない',
|
||||||
|
},
|
||||||
|
characters: {
|
||||||
|
pageInfo: { hasNextPage: false },
|
||||||
|
edges: [
|
||||||
|
{
|
||||||
|
role: 'SUPPORTING',
|
||||||
|
node: {
|
||||||
|
id: 77,
|
||||||
|
description: 'Classmate.',
|
||||||
|
image: null,
|
||||||
|
name: {
|
||||||
|
first: 'Yuuma',
|
||||||
|
full: 'Yuuma Kunimi',
|
||||||
|
last: 'Kunimi',
|
||||||
|
native: '国見佑真',
|
||||||
|
},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
],
|
||||||
|
},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
}),
|
||||||
|
{
|
||||||
|
status: 200,
|
||||||
|
headers: { 'content-type': 'application/json' },
|
||||||
|
},
|
||||||
|
);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
throw new Error(`Unexpected fetch URL: ${url}`);
|
||||||
|
}) as typeof globalThis.fetch;
|
||||||
|
|
||||||
|
try {
|
||||||
|
const runtime = createCharacterDictionaryRuntimeService({
|
||||||
|
userDataPath,
|
||||||
|
getCurrentMediaPath: () => '/tmp/bunny-girl-senpai-s01e01.mkv',
|
||||||
|
getCurrentMediaTitle: () => 'Rascal Does Not Dream of Bunny Girl Senpai - S01E01',
|
||||||
|
resolveMediaPathForJimaku: (mediaPath) => mediaPath,
|
||||||
|
guessAnilistMediaInfo: async () => ({
|
||||||
|
title: 'Rascal Does Not Dream of Bunny Girl Senpai',
|
||||||
|
episode: 1,
|
||||||
|
source: 'fallback',
|
||||||
|
}),
|
||||||
|
now: () => 1_700_000_000_000,
|
||||||
|
});
|
||||||
|
|
||||||
|
const result = await runtime.generateForCurrentMedia();
|
||||||
|
const termBank = JSON.parse(
|
||||||
|
readStoredZipEntry(result.zipPath, 'term_bank_1.json').toString('utf8'),
|
||||||
|
) as Array<
|
||||||
|
[
|
||||||
|
string,
|
||||||
|
string,
|
||||||
|
string,
|
||||||
|
string,
|
||||||
|
number,
|
||||||
|
Array<string | Record<string, unknown>>,
|
||||||
|
number,
|
||||||
|
string,
|
||||||
|
]
|
||||||
|
>;
|
||||||
|
|
||||||
|
const familyName = termBank.find(([term]) => term === '国見');
|
||||||
|
assert.ok(familyName, 'expected kanji family-name term from AniList hints');
|
||||||
|
assert.equal(familyName[1], 'くにみ');
|
||||||
|
|
||||||
|
const givenName = termBank.find(([term]) => term === '佑真');
|
||||||
|
assert.ok(givenName, 'expected kanji given-name term from AniList hints');
|
||||||
|
assert.equal(givenName[1], 'ゆうま');
|
||||||
|
} finally {
|
||||||
|
globalThis.fetch = originalFetch;
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
test('generateForCurrentMedia indexes AniList alternative character names for alias lookups', async () => {
|
test('generateForCurrentMedia indexes AniList alternative character names for alias lookups', async () => {
|
||||||
const userDataPath = makeTempDir();
|
const userDataPath = makeTempDir();
|
||||||
const originalFetch = globalThis.fetch;
|
const originalFetch = globalThis.fetch;
|
||||||
@@ -812,6 +934,520 @@ test('generateForCurrentMedia indexes AniList alternative character names for al
|
|||||||
}
|
}
|
||||||
});
|
});
|
||||||
|
|
||||||
|
test('generateForCurrentMedia skips AniList characters without a native name when other valid characters exist', async () => {
|
||||||
|
const userDataPath = makeTempDir();
|
||||||
|
const originalFetch = globalThis.fetch;
|
||||||
|
|
||||||
|
globalThis.fetch = (async (input: string | URL | Request, init?: RequestInit) => {
|
||||||
|
const url = typeof input === 'string' ? input : input instanceof URL ? input.href : input.url;
|
||||||
|
if (url === GRAPHQL_URL) {
|
||||||
|
const body = JSON.parse(String(init?.body ?? '{}')) as {
|
||||||
|
query?: string;
|
||||||
|
};
|
||||||
|
|
||||||
|
if (body.query?.includes('Page(perPage: 10)')) {
|
||||||
|
return new Response(
|
||||||
|
JSON.stringify({
|
||||||
|
data: {
|
||||||
|
Page: {
|
||||||
|
media: [
|
||||||
|
{
|
||||||
|
id: 130298,
|
||||||
|
episodes: 20,
|
||||||
|
title: {
|
||||||
|
romaji: 'Kage no Jitsuryokusha ni Naritakute!',
|
||||||
|
english: 'The Eminence in Shadow',
|
||||||
|
native: '陰の実力者になりたくて!',
|
||||||
|
},
|
||||||
|
},
|
||||||
|
],
|
||||||
|
},
|
||||||
|
},
|
||||||
|
}),
|
||||||
|
{ status: 200, headers: { 'content-type': 'application/json' } },
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
if (body.query?.includes('characters(page: $page')) {
|
||||||
|
return new Response(
|
||||||
|
JSON.stringify({
|
||||||
|
data: {
|
||||||
|
Media: {
|
||||||
|
title: {
|
||||||
|
english: 'The Eminence in Shadow',
|
||||||
|
},
|
||||||
|
characters: {
|
||||||
|
pageInfo: { hasNextPage: false },
|
||||||
|
edges: [
|
||||||
|
{
|
||||||
|
role: 'MAIN',
|
||||||
|
node: {
|
||||||
|
id: 111,
|
||||||
|
description: 'Valid native name.',
|
||||||
|
image: null,
|
||||||
|
name: {
|
||||||
|
full: 'Alpha',
|
||||||
|
native: 'アルファ',
|
||||||
|
first: 'Alpha',
|
||||||
|
last: null,
|
||||||
|
},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
{
|
||||||
|
role: 'SUPPORTING',
|
||||||
|
node: {
|
||||||
|
id: 222,
|
||||||
|
description: 'Missing native name.',
|
||||||
|
image: null,
|
||||||
|
name: {
|
||||||
|
full: 'John Smith',
|
||||||
|
native: '',
|
||||||
|
first: 'John',
|
||||||
|
last: 'Smith',
|
||||||
|
},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
],
|
||||||
|
},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
}),
|
||||||
|
{ status: 200, headers: { 'content-type': 'application/json' } },
|
||||||
|
);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
throw new Error(`Unexpected fetch URL: ${url}`);
|
||||||
|
}) as typeof globalThis.fetch;
|
||||||
|
|
||||||
|
try {
|
||||||
|
const runtime = createCharacterDictionaryRuntimeService({
|
||||||
|
userDataPath,
|
||||||
|
getCurrentMediaPath: () => '/tmp/eminence-s01e05.mkv',
|
||||||
|
getCurrentMediaTitle: () => 'The Eminence in Shadow - S01E05',
|
||||||
|
resolveMediaPathForJimaku: (mediaPath) => mediaPath,
|
||||||
|
guessAnilistMediaInfo: async () => ({
|
||||||
|
title: 'The Eminence in Shadow',
|
||||||
|
episode: 5,
|
||||||
|
source: 'fallback',
|
||||||
|
}),
|
||||||
|
now: () => 1_700_000_000_000,
|
||||||
|
});
|
||||||
|
|
||||||
|
const result = await runtime.generateForCurrentMedia();
|
||||||
|
const termBank = JSON.parse(
|
||||||
|
readStoredZipEntry(result.zipPath, 'term_bank_1.json').toString('utf8'),
|
||||||
|
) as Array<
|
||||||
|
[
|
||||||
|
string,
|
||||||
|
string,
|
||||||
|
string,
|
||||||
|
string,
|
||||||
|
number,
|
||||||
|
Array<string | Record<string, unknown>>,
|
||||||
|
number,
|
||||||
|
string,
|
||||||
|
]
|
||||||
|
>;
|
||||||
|
|
||||||
|
assert.ok(termBank.find(([term]) => term === 'アルファ'));
|
||||||
|
assert.equal(
|
||||||
|
termBank.some(([term]) => term === 'John Smith'),
|
||||||
|
false,
|
||||||
|
);
|
||||||
|
} finally {
|
||||||
|
globalThis.fetch = originalFetch;
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
|
test('generateForCurrentMedia uses AniList first and last name hints to build kanji readings', async () => {
|
||||||
|
const userDataPath = makeTempDir();
|
||||||
|
const originalFetch = globalThis.fetch;
|
||||||
|
|
||||||
|
globalThis.fetch = (async (input: string | URL | Request, init?: RequestInit) => {
|
||||||
|
const url = typeof input === 'string' ? input : input instanceof URL ? input.href : input.url;
|
||||||
|
if (url === GRAPHQL_URL) {
|
||||||
|
const body = JSON.parse(String(init?.body ?? '{}')) as {
|
||||||
|
query?: string;
|
||||||
|
};
|
||||||
|
|
||||||
|
if (body.query?.includes('Page(perPage: 10)')) {
|
||||||
|
return new Response(
|
||||||
|
JSON.stringify({
|
||||||
|
data: {
|
||||||
|
Page: {
|
||||||
|
media: [
|
||||||
|
{
|
||||||
|
id: 20594,
|
||||||
|
episodes: 10,
|
||||||
|
title: {
|
||||||
|
romaji: 'Kono Subarashii Sekai ni Shukufuku wo!',
|
||||||
|
english: 'KONOSUBA -God’s blessing on this wonderful world!',
|
||||||
|
native: 'この素晴らしい世界に祝福を!',
|
||||||
|
},
|
||||||
|
},
|
||||||
|
],
|
||||||
|
},
|
||||||
|
},
|
||||||
|
}),
|
||||||
|
{
|
||||||
|
status: 200,
|
||||||
|
headers: { 'content-type': 'application/json' },
|
||||||
|
},
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
if (body.query?.includes('characters(page: $page')) {
|
||||||
|
return new Response(
|
||||||
|
JSON.stringify({
|
||||||
|
data: {
|
||||||
|
Media: {
|
||||||
|
title: {
|
||||||
|
romaji: 'Kono Subarashii Sekai ni Shukufuku wo!',
|
||||||
|
english: 'KONOSUBA -God’s blessing on this wonderful world!',
|
||||||
|
native: 'この素晴らしい世界に祝福を!',
|
||||||
|
},
|
||||||
|
characters: {
|
||||||
|
pageInfo: { hasNextPage: false },
|
||||||
|
edges: [
|
||||||
|
{
|
||||||
|
role: 'MAIN',
|
||||||
|
node: {
|
||||||
|
id: 1,
|
||||||
|
description: 'The protagonist.',
|
||||||
|
image: null,
|
||||||
|
name: {
|
||||||
|
full: 'Satou Kazuma',
|
||||||
|
native: '佐藤和真',
|
||||||
|
first: '和真',
|
||||||
|
last: '佐藤',
|
||||||
|
},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
],
|
||||||
|
},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
}),
|
||||||
|
{
|
||||||
|
status: 200,
|
||||||
|
headers: { 'content-type': 'application/json' },
|
||||||
|
},
|
||||||
|
);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
throw new Error(`Unexpected fetch URL: ${url}`);
|
||||||
|
}) as typeof globalThis.fetch;
|
||||||
|
|
||||||
|
try {
|
||||||
|
const runtime = createCharacterDictionaryRuntimeService({
|
||||||
|
userDataPath,
|
||||||
|
getCurrentMediaPath: () => '/tmp/konosuba-s02e05.mkv',
|
||||||
|
getCurrentMediaTitle: () => 'Konosuba S02E05',
|
||||||
|
resolveMediaPathForJimaku: (mediaPath) => mediaPath,
|
||||||
|
guessAnilistMediaInfo: async () => ({
|
||||||
|
title: 'Konosuba',
|
||||||
|
episode: 5,
|
||||||
|
source: 'fallback',
|
||||||
|
}),
|
||||||
|
now: () => 1_700_000_000_000,
|
||||||
|
});
|
||||||
|
|
||||||
|
const result = await runtime.generateForCurrentMedia();
|
||||||
|
const termBank = JSON.parse(
|
||||||
|
readStoredZipEntry(result.zipPath, 'term_bank_1.json').toString('utf8'),
|
||||||
|
) as Array<
|
||||||
|
[
|
||||||
|
string,
|
||||||
|
string,
|
||||||
|
string,
|
||||||
|
string,
|
||||||
|
number,
|
||||||
|
Array<string | Record<string, unknown>>,
|
||||||
|
number,
|
||||||
|
string,
|
||||||
|
]
|
||||||
|
>;
|
||||||
|
|
||||||
|
assert.equal(termBank.find(([term]) => term === '佐藤和真')?.[1], 'さとうかずま');
|
||||||
|
assert.equal(termBank.find(([term]) => term === '佐藤')?.[1], 'さとう');
|
||||||
|
assert.equal(termBank.find(([term]) => term === '和真')?.[1], 'かずま');
|
||||||
|
} finally {
|
||||||
|
globalThis.fetch = originalFetch;
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
|
test('generateForCurrentMedia includes AniList gender age birthday and blood type in character information', async () => {
|
||||||
|
const userDataPath = makeTempDir();
|
||||||
|
const originalFetch = globalThis.fetch;
|
||||||
|
|
||||||
|
globalThis.fetch = (async (input: string | URL | Request, init?: RequestInit) => {
|
||||||
|
const url = typeof input === 'string' ? input : input instanceof URL ? input.href : input.url;
|
||||||
|
if (url === GRAPHQL_URL) {
|
||||||
|
const body = JSON.parse(String(init?.body ?? '{}')) as {
|
||||||
|
query?: string;
|
||||||
|
};
|
||||||
|
|
||||||
|
if (body.query?.includes('Page(perPage: 10)')) {
|
||||||
|
return new Response(
|
||||||
|
JSON.stringify({
|
||||||
|
data: {
|
||||||
|
Page: {
|
||||||
|
media: [
|
||||||
|
{
|
||||||
|
id: 130298,
|
||||||
|
episodes: 20,
|
||||||
|
title: {
|
||||||
|
romaji: 'Kage no Jitsuryokusha ni Naritakute!',
|
||||||
|
english: 'The Eminence in Shadow',
|
||||||
|
native: '陰の実力者になりたくて!',
|
||||||
|
},
|
||||||
|
},
|
||||||
|
],
|
||||||
|
},
|
||||||
|
},
|
||||||
|
}),
|
||||||
|
{ status: 200, headers: { 'content-type': 'application/json' } },
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
if (body.query?.includes('characters(page: $page')) {
|
||||||
|
return new Response(
|
||||||
|
JSON.stringify({
|
||||||
|
data: {
|
||||||
|
Media: {
|
||||||
|
title: {
|
||||||
|
english: 'The Eminence in Shadow',
|
||||||
|
},
|
||||||
|
characters: {
|
||||||
|
pageInfo: { hasNextPage: false },
|
||||||
|
edges: [
|
||||||
|
{
|
||||||
|
role: 'SUPPORTING',
|
||||||
|
node: {
|
||||||
|
id: 123,
|
||||||
|
description: 'Second princess of Midgar.',
|
||||||
|
image: null,
|
||||||
|
gender: 'Female',
|
||||||
|
age: '15',
|
||||||
|
dateOfBirth: {
|
||||||
|
month: 9,
|
||||||
|
day: 1,
|
||||||
|
},
|
||||||
|
bloodType: 'A',
|
||||||
|
name: {
|
||||||
|
full: 'Alexia Midgar',
|
||||||
|
native: 'アレクシア・ミドガル',
|
||||||
|
first: 'Alexia',
|
||||||
|
last: 'Midgar',
|
||||||
|
},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
],
|
||||||
|
},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
}),
|
||||||
|
{ status: 200, headers: { 'content-type': 'application/json' } },
|
||||||
|
);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
throw new Error(`Unexpected fetch URL: ${url}`);
|
||||||
|
}) as typeof globalThis.fetch;
|
||||||
|
|
||||||
|
try {
|
||||||
|
const runtime = createCharacterDictionaryRuntimeService({
|
||||||
|
userDataPath,
|
||||||
|
getCurrentMediaPath: () => '/tmp/eminence-s01e05.mkv',
|
||||||
|
getCurrentMediaTitle: () => 'The Eminence in Shadow - S01E05',
|
||||||
|
resolveMediaPathForJimaku: (mediaPath) => mediaPath,
|
||||||
|
guessAnilistMediaInfo: async () => ({
|
||||||
|
title: 'The Eminence in Shadow',
|
||||||
|
episode: 5,
|
||||||
|
source: 'fallback',
|
||||||
|
}),
|
||||||
|
now: () => 1_700_000_000_000,
|
||||||
|
});
|
||||||
|
|
||||||
|
const result = await runtime.generateForCurrentMedia();
|
||||||
|
const termBank = JSON.parse(
|
||||||
|
readStoredZipEntry(result.zipPath, 'term_bank_1.json').toString('utf8'),
|
||||||
|
) as Array<
|
||||||
|
[
|
||||||
|
string,
|
||||||
|
string,
|
||||||
|
string,
|
||||||
|
string,
|
||||||
|
number,
|
||||||
|
Array<string | Record<string, unknown>>,
|
||||||
|
number,
|
||||||
|
string,
|
||||||
|
]
|
||||||
|
>;
|
||||||
|
const alexia = termBank.find(([term]) => term === 'アレクシア');
|
||||||
|
assert.ok(alexia);
|
||||||
|
|
||||||
|
const children = (
|
||||||
|
alexia[5][0] as {
|
||||||
|
content: { content: Array<Record<string, unknown>> };
|
||||||
|
}
|
||||||
|
).content.content;
|
||||||
|
const infoSection = children.find(
|
||||||
|
(c) =>
|
||||||
|
(c as { tag?: string }).tag === 'details' &&
|
||||||
|
Array.isArray((c as { content?: unknown[] }).content) &&
|
||||||
|
(c as { content: Array<{ content?: string }> }).content[0]?.content ===
|
||||||
|
'Character Information',
|
||||||
|
) as { content: Array<Record<string, unknown>> } | undefined;
|
||||||
|
assert.ok(infoSection);
|
||||||
|
const body = infoSection.content[1] as { content: Array<{ content?: string }> };
|
||||||
|
const flattened = JSON.stringify(body.content);
|
||||||
|
|
||||||
|
assert.match(flattened, /Female|♂ Male|♀ Female/);
|
||||||
|
assert.match(flattened, /15 years/);
|
||||||
|
assert.match(flattened, /Blood Type A/);
|
||||||
|
assert.match(flattened, /Birthday: September 1/);
|
||||||
|
} finally {
|
||||||
|
globalThis.fetch = originalFetch;
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
|
test('generateForCurrentMedia preserves duplicate surface forms across different characters', async () => {
|
||||||
|
const userDataPath = makeTempDir();
|
||||||
|
const originalFetch = globalThis.fetch;
|
||||||
|
|
||||||
|
globalThis.fetch = (async (input: string | URL | Request, init?: RequestInit) => {
|
||||||
|
const url = typeof input === 'string' ? input : input instanceof URL ? input.href : input.url;
|
||||||
|
if (url === GRAPHQL_URL) {
|
||||||
|
const body = JSON.parse(String(init?.body ?? '{}')) as {
|
||||||
|
query?: string;
|
||||||
|
};
|
||||||
|
|
||||||
|
if (body.query?.includes('Page(perPage: 10)')) {
|
||||||
|
return new Response(
|
||||||
|
JSON.stringify({
|
||||||
|
data: {
|
||||||
|
Page: {
|
||||||
|
media: [
|
||||||
|
{
|
||||||
|
id: 130298,
|
||||||
|
episodes: 20,
|
||||||
|
title: {
|
||||||
|
romaji: 'Kage no Jitsuryokusha ni Naritakute!',
|
||||||
|
english: 'The Eminence in Shadow',
|
||||||
|
native: '陰の実力者になりたくて!',
|
||||||
|
},
|
||||||
|
},
|
||||||
|
],
|
||||||
|
},
|
||||||
|
},
|
||||||
|
}),
|
||||||
|
{ status: 200, headers: { 'content-type': 'application/json' } },
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
if (body.query?.includes('characters(page: $page')) {
|
||||||
|
return new Response(
|
||||||
|
JSON.stringify({
|
||||||
|
data: {
|
||||||
|
Media: {
|
||||||
|
title: {
|
||||||
|
english: 'The Eminence in Shadow',
|
||||||
|
},
|
||||||
|
characters: {
|
||||||
|
pageInfo: { hasNextPage: false },
|
||||||
|
edges: [
|
||||||
|
{
|
||||||
|
role: 'MAIN',
|
||||||
|
node: {
|
||||||
|
id: 111,
|
||||||
|
description: 'First Alpha.',
|
||||||
|
image: null,
|
||||||
|
name: {
|
||||||
|
full: 'Alpha One',
|
||||||
|
native: 'アルファ',
|
||||||
|
first: 'Alpha',
|
||||||
|
last: 'One',
|
||||||
|
},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
{
|
||||||
|
role: 'MAIN',
|
||||||
|
node: {
|
||||||
|
id: 222,
|
||||||
|
description: 'Second Alpha.',
|
||||||
|
image: null,
|
||||||
|
name: {
|
||||||
|
full: 'Alpha Two',
|
||||||
|
native: 'アルファ',
|
||||||
|
first: 'Alpha',
|
||||||
|
last: 'Two',
|
||||||
|
},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
],
|
||||||
|
},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
}),
|
||||||
|
{ status: 200, headers: { 'content-type': 'application/json' } },
|
||||||
|
);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
throw new Error(`Unexpected fetch URL: ${url}`);
|
||||||
|
}) as typeof globalThis.fetch;
|
||||||
|
|
||||||
|
try {
|
||||||
|
const runtime = createCharacterDictionaryRuntimeService({
|
||||||
|
userDataPath,
|
||||||
|
getCurrentMediaPath: () => '/tmp/eminence-s01e05.mkv',
|
||||||
|
getCurrentMediaTitle: () => 'The Eminence in Shadow - S01E05',
|
||||||
|
resolveMediaPathForJimaku: (mediaPath) => mediaPath,
|
||||||
|
guessAnilistMediaInfo: async () => ({
|
||||||
|
title: 'The Eminence in Shadow',
|
||||||
|
episode: 5,
|
||||||
|
source: 'fallback',
|
||||||
|
}),
|
||||||
|
now: () => 1_700_000_000_000,
|
||||||
|
});
|
||||||
|
|
||||||
|
const result = await runtime.generateForCurrentMedia();
|
||||||
|
const termBank = JSON.parse(
|
||||||
|
readStoredZipEntry(result.zipPath, 'term_bank_1.json').toString('utf8'),
|
||||||
|
) as Array<
|
||||||
|
[
|
||||||
|
string,
|
||||||
|
string,
|
||||||
|
string,
|
||||||
|
string,
|
||||||
|
number,
|
||||||
|
Array<string | Record<string, unknown>>,
|
||||||
|
number,
|
||||||
|
string,
|
||||||
|
]
|
||||||
|
>;
|
||||||
|
|
||||||
|
const alphaEntries = termBank.filter(([term]) => term === 'アルファ');
|
||||||
|
assert.equal(alphaEntries.length, 2);
|
||||||
|
const glossaries = alphaEntries.map((entry) =>
|
||||||
|
JSON.stringify(
|
||||||
|
(
|
||||||
|
entry[5][0] as {
|
||||||
|
content: { content: Array<Record<string, unknown>> };
|
||||||
|
}
|
||||||
|
).content.content,
|
||||||
|
),
|
||||||
|
);
|
||||||
|
assert.ok(glossaries.some((value) => value.includes('First Alpha.')));
|
||||||
|
assert.ok(glossaries.some((value) => value.includes('Second Alpha.')));
|
||||||
|
} finally {
|
||||||
|
globalThis.fetch = originalFetch;
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
test('getOrCreateCurrentSnapshot persists and reuses normalized snapshot data', async () => {
|
test('getOrCreateCurrentSnapshot persists and reuses normalized snapshot data', async () => {
|
||||||
const userDataPath = makeTempDir();
|
const userDataPath = makeTempDir();
|
||||||
const originalFetch = globalThis.fetch;
|
const originalFetch = globalThis.fetch;
|
||||||
|
|||||||
@@ -10,21 +10,21 @@ const ANILIST_GRAPHQL_URL = 'https://graphql.anilist.co';
|
|||||||
const ANILIST_REQUEST_DELAY_MS = 2000;
|
const ANILIST_REQUEST_DELAY_MS = 2000;
|
||||||
const CHARACTER_IMAGE_DOWNLOAD_DELAY_MS = 250;
|
const CHARACTER_IMAGE_DOWNLOAD_DELAY_MS = 250;
|
||||||
const HONORIFIC_SUFFIXES = [
|
const HONORIFIC_SUFFIXES = [
|
||||||
'さん',
|
{ term: 'さん', reading: 'さん' },
|
||||||
'様',
|
{ term: '様', reading: 'さま' },
|
||||||
'先生',
|
{ term: '先生', reading: 'せんせい' },
|
||||||
'先輩',
|
{ term: '先輩', reading: 'せんぱい' },
|
||||||
'後輩',
|
{ term: '後輩', reading: 'こうはい' },
|
||||||
'氏',
|
{ term: '氏', reading: 'し' },
|
||||||
'君',
|
{ term: '君', reading: 'くん' },
|
||||||
'くん',
|
{ term: 'くん', reading: 'くん' },
|
||||||
'ちゃん',
|
{ term: 'ちゃん', reading: 'ちゃん' },
|
||||||
'たん',
|
{ term: 'たん', reading: 'たん' },
|
||||||
'坊',
|
{ term: '坊', reading: 'ぼう' },
|
||||||
'殿',
|
{ term: '殿', reading: 'どの' },
|
||||||
'博士',
|
{ term: '博士', reading: 'はかせ' },
|
||||||
'社長',
|
{ term: '社長', reading: 'しゃちょう' },
|
||||||
'部長',
|
{ term: '部長', reading: 'ぶちょう' },
|
||||||
] as const;
|
] as const;
|
||||||
type CharacterDictionaryRole = 'main' | 'primary' | 'side' | 'appears';
|
type CharacterDictionaryRole = 'main' | 'primary' | 'side' | 'appears';
|
||||||
|
|
||||||
@@ -45,6 +45,24 @@ type CharacterDictionarySnapshotImage = {
|
|||||||
dataBase64: string;
|
dataBase64: string;
|
||||||
};
|
};
|
||||||
|
|
||||||
|
type CharacterBirthday = [number, number];
|
||||||
|
|
||||||
|
type JapaneseNameParts = {
|
||||||
|
hasSpace: boolean;
|
||||||
|
original: string;
|
||||||
|
combined: string;
|
||||||
|
family: string | null;
|
||||||
|
given: string | null;
|
||||||
|
};
|
||||||
|
|
||||||
|
type NameReadings = {
|
||||||
|
hasSpace: boolean;
|
||||||
|
original: string;
|
||||||
|
full: string;
|
||||||
|
family: string;
|
||||||
|
given: string;
|
||||||
|
};
|
||||||
|
|
||||||
export type CharacterDictionarySnapshot = {
|
export type CharacterDictionarySnapshot = {
|
||||||
formatVersion: number;
|
formatVersion: number;
|
||||||
mediaId: number;
|
mediaId: number;
|
||||||
@@ -55,7 +73,7 @@ export type CharacterDictionarySnapshot = {
|
|||||||
images: CharacterDictionarySnapshotImage[];
|
images: CharacterDictionarySnapshotImage[];
|
||||||
};
|
};
|
||||||
|
|
||||||
const CHARACTER_DICTIONARY_FORMAT_VERSION = 14;
|
const CHARACTER_DICTIONARY_FORMAT_VERSION = 15;
|
||||||
const CHARACTER_DICTIONARY_MERGED_TITLE = 'SubMiner Character Dictionary';
|
const CHARACTER_DICTIONARY_MERGED_TITLE = 'SubMiner Character Dictionary';
|
||||||
|
|
||||||
type AniListSearchResponse = {
|
type AniListSearchResponse = {
|
||||||
@@ -103,8 +121,17 @@ type AniListCharacterPageResponse = {
|
|||||||
large?: string | null;
|
large?: string | null;
|
||||||
medium?: string | null;
|
medium?: string | null;
|
||||||
} | null;
|
} | null;
|
||||||
|
gender?: string | null;
|
||||||
|
age?: string | number | null;
|
||||||
|
dateOfBirth?: {
|
||||||
|
month?: number | null;
|
||||||
|
day?: number | null;
|
||||||
|
} | null;
|
||||||
|
bloodType?: string | null;
|
||||||
name?: {
|
name?: {
|
||||||
|
first?: string | null;
|
||||||
full?: string | null;
|
full?: string | null;
|
||||||
|
last?: string | null;
|
||||||
native?: string | null;
|
native?: string | null;
|
||||||
alternative?: Array<string | null> | null;
|
alternative?: Array<string | null> | null;
|
||||||
} | null;
|
} | null;
|
||||||
@@ -124,11 +151,17 @@ type VoiceActorRecord = {
|
|||||||
type CharacterRecord = {
|
type CharacterRecord = {
|
||||||
id: number;
|
id: number;
|
||||||
role: CharacterDictionaryRole;
|
role: CharacterDictionaryRole;
|
||||||
|
firstNameHint: string;
|
||||||
fullName: string;
|
fullName: string;
|
||||||
|
lastNameHint: string;
|
||||||
nativeName: string;
|
nativeName: string;
|
||||||
alternativeNames: string[];
|
alternativeNames: string[];
|
||||||
|
bloodType: string;
|
||||||
|
birthday: CharacterBirthday | null;
|
||||||
description: string;
|
description: string;
|
||||||
imageUrl: string | null;
|
imageUrl: string | null;
|
||||||
|
age: string;
|
||||||
|
sex: string;
|
||||||
voiceActors: VoiceActorRecord[];
|
voiceActors: VoiceActorRecord[];
|
||||||
};
|
};
|
||||||
|
|
||||||
@@ -161,6 +194,16 @@ export type CharacterDictionarySnapshotResult = {
|
|||||||
updatedAt: number;
|
updatedAt: number;
|
||||||
};
|
};
|
||||||
|
|
||||||
|
export type CharacterDictionarySnapshotProgress = {
|
||||||
|
mediaId: number;
|
||||||
|
mediaTitle: string;
|
||||||
|
};
|
||||||
|
|
||||||
|
export type CharacterDictionarySnapshotProgressCallbacks = {
|
||||||
|
onChecking?: (progress: CharacterDictionarySnapshotProgress) => void;
|
||||||
|
onGenerating?: (progress: CharacterDictionarySnapshotProgress) => void;
|
||||||
|
};
|
||||||
|
|
||||||
export type MergedCharacterDictionaryBuildResult = {
|
export type MergedCharacterDictionaryBuildResult = {
|
||||||
zipPath: string;
|
zipPath: string;
|
||||||
revision: string;
|
revision: string;
|
||||||
@@ -263,6 +306,16 @@ function buildReading(term: string): string {
|
|||||||
return katakanaToHiragana(compact);
|
return katakanaToHiragana(compact);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
function containsKanji(value: string): boolean {
|
||||||
|
for (const char of value) {
|
||||||
|
const code = char.charCodeAt(0);
|
||||||
|
if ((code >= 0x4e00 && code <= 0x9fff) || (code >= 0x3400 && code <= 0x4dbf)) {
|
||||||
|
return true;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
|
||||||
function isRomanizedName(value: string): boolean {
|
function isRomanizedName(value: string): boolean {
|
||||||
return /^[A-Za-zĀĪŪĒŌÂÊÎÔÛāīūēōâêîôû'’.\-\s]+$/.test(value);
|
return /^[A-Za-zĀĪŪĒŌÂÊÎÔÛāīūēōâêîôû'’.\-\s]+$/.test(value);
|
||||||
}
|
}
|
||||||
@@ -484,6 +537,67 @@ function romanizedTokenToKatakana(token: string): string | null {
|
|||||||
return output.length > 0 ? output : null;
|
return output.length > 0 ? output : null;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
function buildReadingFromRomanized(value: string): string {
|
||||||
|
const katakana = romanizedTokenToKatakana(value);
|
||||||
|
return katakana ? katakanaToHiragana(katakana) : '';
|
||||||
|
}
|
||||||
|
|
||||||
|
function buildReadingFromHint(value: string): string {
|
||||||
|
return buildReading(value) || buildReadingFromRomanized(value);
|
||||||
|
}
|
||||||
|
|
||||||
|
function scoreJapaneseNamePartLength(length: number): number {
|
||||||
|
if (length === 2) return 3;
|
||||||
|
if (length === 1 || length === 3) return 2;
|
||||||
|
if (length === 4) return 1;
|
||||||
|
return 0;
|
||||||
|
}
|
||||||
|
|
||||||
|
function inferJapaneseNameSplitIndex(
|
||||||
|
nameOriginal: string,
|
||||||
|
firstNameHint: string,
|
||||||
|
lastNameHint: string,
|
||||||
|
): number | null {
|
||||||
|
const chars = [...nameOriginal];
|
||||||
|
if (chars.length < 2) return null;
|
||||||
|
|
||||||
|
const familyHintLength = [...buildReadingFromHint(lastNameHint)].length;
|
||||||
|
const givenHintLength = [...buildReadingFromHint(firstNameHint)].length;
|
||||||
|
const totalHintLength = familyHintLength + givenHintLength;
|
||||||
|
const defaultBoundary = Math.round(chars.length / 2);
|
||||||
|
let bestIndex: number | null = null;
|
||||||
|
let bestScore = Number.NEGATIVE_INFINITY;
|
||||||
|
|
||||||
|
for (let index = 1; index < chars.length; index += 1) {
|
||||||
|
const familyLength = index;
|
||||||
|
const givenLength = chars.length - index;
|
||||||
|
let score =
|
||||||
|
scoreJapaneseNamePartLength(familyLength) + scoreJapaneseNamePartLength(givenLength);
|
||||||
|
|
||||||
|
if (chars.length >= 4 && familyLength >= 2 && givenLength >= 2) {
|
||||||
|
score += 1;
|
||||||
|
}
|
||||||
|
|
||||||
|
if (totalHintLength > 0) {
|
||||||
|
const expectedFamilyLength = (chars.length * familyHintLength) / totalHintLength;
|
||||||
|
score -= Math.abs(familyLength - expectedFamilyLength) * 1.5;
|
||||||
|
} else {
|
||||||
|
score -= Math.abs(familyLength - defaultBoundary) * 0.5;
|
||||||
|
}
|
||||||
|
|
||||||
|
if (familyLength === givenLength) {
|
||||||
|
score += 0.25;
|
||||||
|
}
|
||||||
|
|
||||||
|
if (score > bestScore) {
|
||||||
|
bestScore = score;
|
||||||
|
bestIndex = index;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return bestIndex;
|
||||||
|
}
|
||||||
|
|
||||||
function addRomanizedKanaAliases(values: Iterable<string>): string[] {
|
function addRomanizedKanaAliases(values: Iterable<string>): string[] {
|
||||||
const aliases = new Set<string>();
|
const aliases = new Set<string>();
|
||||||
for (const value of values) {
|
for (const value of values) {
|
||||||
@@ -497,6 +611,166 @@ function addRomanizedKanaAliases(values: Iterable<string>): string[] {
|
|||||||
return [...aliases];
|
return [...aliases];
|
||||||
}
|
}
|
||||||
|
|
||||||
|
function splitJapaneseName(
|
||||||
|
nameOriginal: string,
|
||||||
|
firstNameHint?: string,
|
||||||
|
lastNameHint?: string,
|
||||||
|
): JapaneseNameParts {
|
||||||
|
const trimmed = nameOriginal.trim();
|
||||||
|
if (!trimmed) {
|
||||||
|
return {
|
||||||
|
hasSpace: false,
|
||||||
|
original: '',
|
||||||
|
combined: '',
|
||||||
|
family: null,
|
||||||
|
given: null,
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
const normalizedSpace = trimmed.replace(/[\s\u3000]+/g, ' ').trim();
|
||||||
|
const spaceParts = normalizedSpace.split(' ').filter((part) => part.length > 0);
|
||||||
|
if (spaceParts.length === 2) {
|
||||||
|
const family = spaceParts[0]!;
|
||||||
|
const given = spaceParts[1]!;
|
||||||
|
return {
|
||||||
|
hasSpace: true,
|
||||||
|
original: normalizedSpace,
|
||||||
|
combined: `${family}${given}`,
|
||||||
|
family,
|
||||||
|
given,
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
const middleDotParts = trimmed
|
||||||
|
.split(/[・・·•]/)
|
||||||
|
.map((part) => part.trim())
|
||||||
|
.filter((part) => part.length > 0);
|
||||||
|
if (middleDotParts.length === 2) {
|
||||||
|
const family = middleDotParts[0]!;
|
||||||
|
const given = middleDotParts[1]!;
|
||||||
|
return {
|
||||||
|
hasSpace: true,
|
||||||
|
original: trimmed,
|
||||||
|
combined: `${family}${given}`,
|
||||||
|
family,
|
||||||
|
given,
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
const hintedFirst = firstNameHint?.trim() || '';
|
||||||
|
const hintedLast = lastNameHint?.trim() || '';
|
||||||
|
if (hintedFirst && hintedLast) {
|
||||||
|
const familyGiven = `${hintedLast}${hintedFirst}`;
|
||||||
|
if (trimmed === familyGiven) {
|
||||||
|
return {
|
||||||
|
hasSpace: true,
|
||||||
|
original: trimmed,
|
||||||
|
combined: familyGiven,
|
||||||
|
family: hintedLast,
|
||||||
|
given: hintedFirst,
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
const givenFamily = `${hintedFirst}${hintedLast}`;
|
||||||
|
if (trimmed === givenFamily) {
|
||||||
|
return {
|
||||||
|
hasSpace: true,
|
||||||
|
original: trimmed,
|
||||||
|
combined: givenFamily,
|
||||||
|
family: hintedFirst,
|
||||||
|
given: hintedLast,
|
||||||
|
};
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if (hintedFirst && hintedLast && containsKanji(trimmed)) {
|
||||||
|
const splitIndex = inferJapaneseNameSplitIndex(trimmed, hintedFirst, hintedLast);
|
||||||
|
if (splitIndex != null) {
|
||||||
|
const chars = [...trimmed];
|
||||||
|
const family = chars.slice(0, splitIndex).join('');
|
||||||
|
const given = chars.slice(splitIndex).join('');
|
||||||
|
if (family && given) {
|
||||||
|
return {
|
||||||
|
hasSpace: true,
|
||||||
|
original: trimmed,
|
||||||
|
combined: trimmed,
|
||||||
|
family,
|
||||||
|
given,
|
||||||
|
};
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return {
|
||||||
|
hasSpace: false,
|
||||||
|
original: trimmed,
|
||||||
|
combined: trimmed,
|
||||||
|
family: null,
|
||||||
|
given: null,
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
function generateNameReadings(
|
||||||
|
nameOriginal: string,
|
||||||
|
romanizedName: string,
|
||||||
|
firstNameHint?: string,
|
||||||
|
lastNameHint?: string,
|
||||||
|
): NameReadings {
|
||||||
|
const trimmed = nameOriginal.trim();
|
||||||
|
if (!trimmed) {
|
||||||
|
return {
|
||||||
|
hasSpace: false,
|
||||||
|
original: '',
|
||||||
|
full: '',
|
||||||
|
family: '',
|
||||||
|
given: '',
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
const nameParts = splitJapaneseName(trimmed, firstNameHint, lastNameHint);
|
||||||
|
if (!nameParts.hasSpace || !nameParts.family || !nameParts.given) {
|
||||||
|
const full = containsKanji(trimmed)
|
||||||
|
? buildReadingFromRomanized(romanizedName)
|
||||||
|
: buildReading(trimmed);
|
||||||
|
return {
|
||||||
|
hasSpace: false,
|
||||||
|
original: trimmed,
|
||||||
|
full,
|
||||||
|
family: full,
|
||||||
|
given: full,
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
const romanizedParts = romanizedName
|
||||||
|
.trim()
|
||||||
|
.split(/\s+/)
|
||||||
|
.filter((part) => part.length > 0);
|
||||||
|
const familyFromHints = buildReadingFromHint(lastNameHint || '');
|
||||||
|
const givenFromHints = buildReadingFromHint(firstNameHint || '');
|
||||||
|
const familyRomajiFallback = romanizedParts[0] || '';
|
||||||
|
const givenRomajiFallback = romanizedParts.slice(1).join(' ');
|
||||||
|
const family =
|
||||||
|
familyFromHints ||
|
||||||
|
(containsKanji(nameParts.family)
|
||||||
|
? buildReadingFromRomanized(familyRomajiFallback)
|
||||||
|
: buildReading(nameParts.family));
|
||||||
|
const given =
|
||||||
|
givenFromHints ||
|
||||||
|
(containsKanji(nameParts.given)
|
||||||
|
? buildReadingFromRomanized(givenRomajiFallback)
|
||||||
|
: buildReading(nameParts.given));
|
||||||
|
const full =
|
||||||
|
`${family}${given}` || buildReading(trimmed) || buildReadingFromRomanized(romanizedName);
|
||||||
|
|
||||||
|
return {
|
||||||
|
hasSpace: true,
|
||||||
|
original: nameParts.original,
|
||||||
|
full,
|
||||||
|
family,
|
||||||
|
given,
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
function expandRawNameVariants(rawName: string): string[] {
|
function expandRawNameVariants(rawName: string): string[] {
|
||||||
const trimmed = rawName.trim();
|
const trimmed = rawName.trim();
|
||||||
if (!trimmed) return [];
|
if (!trimmed) return [];
|
||||||
@@ -555,24 +829,125 @@ function buildNameTerms(character: CharacterRecord): string[] {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
const nativeParts = splitJapaneseName(
|
||||||
|
character.nativeName,
|
||||||
|
character.firstNameHint,
|
||||||
|
character.lastNameHint,
|
||||||
|
);
|
||||||
|
if (nativeParts.family) {
|
||||||
|
base.add(nativeParts.family);
|
||||||
|
}
|
||||||
|
if (nativeParts.given) {
|
||||||
|
base.add(nativeParts.given);
|
||||||
|
}
|
||||||
|
|
||||||
const withHonorifics = new Set<string>();
|
const withHonorifics = new Set<string>();
|
||||||
for (const entry of base) {
|
for (const entry of base) {
|
||||||
withHonorifics.add(entry);
|
withHonorifics.add(entry);
|
||||||
for (const suffix of HONORIFIC_SUFFIXES) {
|
for (const suffix of HONORIFIC_SUFFIXES) {
|
||||||
withHonorifics.add(`${entry}${suffix}`);
|
withHonorifics.add(`${entry}${suffix.term}`);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
for (const alias of addRomanizedKanaAliases(withHonorifics)) {
|
for (const alias of addRomanizedKanaAliases(withHonorifics)) {
|
||||||
withHonorifics.add(alias);
|
withHonorifics.add(alias);
|
||||||
for (const suffix of HONORIFIC_SUFFIXES) {
|
for (const suffix of HONORIFIC_SUFFIXES) {
|
||||||
withHonorifics.add(`${alias}${suffix}`);
|
withHonorifics.add(`${alias}${suffix.term}`);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
return [...withHonorifics].filter((entry) => entry.trim().length > 0);
|
return [...withHonorifics].filter((entry) => entry.trim().length > 0);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
const MONTH_NAMES: ReadonlyArray<[number, string]> = [
|
||||||
|
[1, 'January'],
|
||||||
|
[2, 'February'],
|
||||||
|
[3, 'March'],
|
||||||
|
[4, 'April'],
|
||||||
|
[5, 'May'],
|
||||||
|
[6, 'June'],
|
||||||
|
[7, 'July'],
|
||||||
|
[8, 'August'],
|
||||||
|
[9, 'September'],
|
||||||
|
[10, 'October'],
|
||||||
|
[11, 'November'],
|
||||||
|
[12, 'December'],
|
||||||
|
];
|
||||||
|
|
||||||
|
const SEX_DISPLAY: ReadonlyArray<[string, string]> = [
|
||||||
|
['m', '♂ Male'],
|
||||||
|
['f', '♀ Female'],
|
||||||
|
['male', '♂ Male'],
|
||||||
|
['female', '♀ Female'],
|
||||||
|
];
|
||||||
|
|
||||||
|
function formatBirthday(birthday: CharacterBirthday | null): string {
|
||||||
|
if (!birthday) return '';
|
||||||
|
const [month, day] = birthday;
|
||||||
|
const monthName = MONTH_NAMES.find(([m]) => m === month)?.[1] || 'Unknown';
|
||||||
|
return `${monthName} ${day}`;
|
||||||
|
}
|
||||||
|
|
||||||
|
function formatCharacterStats(character: CharacterRecord): string {
|
||||||
|
const parts: string[] = [];
|
||||||
|
const normalizedSex = character.sex.trim().toLowerCase();
|
||||||
|
const sexDisplay = SEX_DISPLAY.find(([key]) => key === normalizedSex)?.[1];
|
||||||
|
if (sexDisplay) parts.push(sexDisplay);
|
||||||
|
if (character.age.trim()) parts.push(`${character.age.trim()} years`);
|
||||||
|
if (character.bloodType.trim()) parts.push(`Blood Type ${character.bloodType.trim()}`);
|
||||||
|
const birthday = formatBirthday(character.birthday);
|
||||||
|
if (birthday) parts.push(`Birthday: ${birthday}`);
|
||||||
|
return parts.join(' • ');
|
||||||
|
}
|
||||||
|
|
||||||
|
function buildReadingForTerm(
|
||||||
|
term: string,
|
||||||
|
character: CharacterRecord,
|
||||||
|
readings: NameReadings,
|
||||||
|
nameParts: JapaneseNameParts,
|
||||||
|
): string {
|
||||||
|
for (const suffix of HONORIFIC_SUFFIXES) {
|
||||||
|
if (term.endsWith(suffix.term) && term.length > suffix.term.length) {
|
||||||
|
const baseTerm = term.slice(0, -suffix.term.length);
|
||||||
|
const baseReading = buildReadingForTerm(baseTerm, character, readings, nameParts);
|
||||||
|
return baseReading ? `${baseReading}${suffix.reading}` : '';
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
const compactNative = character.nativeName.replace(/[\s\u3000]+/g, '');
|
||||||
|
const noMiddleDotsNative = compactNative.replace(/[・・·•]/g, '');
|
||||||
|
if (
|
||||||
|
term === character.nativeName ||
|
||||||
|
term === compactNative ||
|
||||||
|
term === noMiddleDotsNative ||
|
||||||
|
term === nameParts.original ||
|
||||||
|
term === nameParts.combined
|
||||||
|
) {
|
||||||
|
return readings.full;
|
||||||
|
}
|
||||||
|
|
||||||
|
const familyCompact = nameParts.family?.replace(/[・・·•]/g, '') || '';
|
||||||
|
if (nameParts.family && (term === nameParts.family || term === familyCompact)) {
|
||||||
|
return readings.family;
|
||||||
|
}
|
||||||
|
|
||||||
|
const givenCompact = nameParts.given?.replace(/[・・·•]/g, '') || '';
|
||||||
|
if (nameParts.given && (term === nameParts.given || term === givenCompact)) {
|
||||||
|
return readings.given;
|
||||||
|
}
|
||||||
|
|
||||||
|
const compact = term.replace(/[\s\u3000]+/g, '');
|
||||||
|
if (hasKanaOnly(compact)) {
|
||||||
|
return buildReading(compact);
|
||||||
|
}
|
||||||
|
|
||||||
|
if (isRomanizedName(term)) {
|
||||||
|
return buildReadingFromRomanized(term) || readings.full;
|
||||||
|
}
|
||||||
|
|
||||||
|
return '';
|
||||||
|
}
|
||||||
|
|
||||||
function parseCharacterDescription(raw: string): {
|
function parseCharacterDescription(raw: string): {
|
||||||
fields: Array<{ key: string; value: string }>;
|
fields: Array<{ key: string; value: string }>;
|
||||||
text: string;
|
text: string;
|
||||||
@@ -623,16 +998,16 @@ function roleInfo(role: CharacterDictionaryRole): { tag: string; score: number }
|
|||||||
function mapRole(input: string | null | undefined): CharacterDictionaryRole {
|
function mapRole(input: string | null | undefined): CharacterDictionaryRole {
|
||||||
const value = (input || '').trim().toUpperCase();
|
const value = (input || '').trim().toUpperCase();
|
||||||
if (value === 'MAIN') return 'main';
|
if (value === 'MAIN') return 'main';
|
||||||
if (value === 'BACKGROUND') return 'appears';
|
if (value === 'SUPPORTING') return 'primary';
|
||||||
if (value === 'SUPPORTING') return 'side';
|
if (value === 'BACKGROUND') return 'side';
|
||||||
return 'primary';
|
return 'side';
|
||||||
}
|
}
|
||||||
|
|
||||||
function roleLabel(role: CharacterDictionaryRole): string {
|
function roleLabel(role: CharacterDictionaryRole): string {
|
||||||
if (role === 'main') return 'Main';
|
if (role === 'main') return 'Protagonist';
|
||||||
if (role === 'primary') return 'Primary';
|
if (role === 'primary') return 'Main Character';
|
||||||
if (role === 'side') return 'Side';
|
if (role === 'side') return 'Side Character';
|
||||||
return 'Appears';
|
return 'Minor Role';
|
||||||
}
|
}
|
||||||
|
|
||||||
function inferImageExt(contentType: string | null): string {
|
function inferImageExt(contentType: string | null): string {
|
||||||
@@ -780,10 +1155,10 @@ function roleBadgeStyle(role: CharacterDictionaryRole): Record<string, string> {
|
|||||||
fontWeight: 'bold',
|
fontWeight: 'bold',
|
||||||
color: '#fff',
|
color: '#fff',
|
||||||
};
|
};
|
||||||
if (role === 'main') return { ...base, backgroundColor: '#4a8c3f' };
|
if (role === 'main') return { ...base, backgroundColor: '#4CAF50' };
|
||||||
if (role === 'primary') return { ...base, backgroundColor: '#5c82b0' };
|
if (role === 'primary') return { ...base, backgroundColor: '#2196F3' };
|
||||||
if (role === 'side') return { ...base, backgroundColor: '#7889a0' };
|
if (role === 'side') return { ...base, backgroundColor: '#FF9800' };
|
||||||
return { ...base, backgroundColor: '#777' };
|
return { ...base, backgroundColor: '#9E9E9E' };
|
||||||
}
|
}
|
||||||
|
|
||||||
function buildCollapsibleSection(
|
function buildCollapsibleSection(
|
||||||
@@ -939,10 +1314,11 @@ function createDefinitionGlossary(
|
|||||||
content: {
|
content: {
|
||||||
tag: 'span',
|
tag: 'span',
|
||||||
style: roleBadgeStyle(character.role),
|
style: roleBadgeStyle(character.role),
|
||||||
content: `${roleLabel(character.role)} Character`,
|
content: roleLabel(character.role),
|
||||||
},
|
},
|
||||||
});
|
});
|
||||||
|
|
||||||
|
const statsLine = formatCharacterStats(character);
|
||||||
if (descriptionText) {
|
if (descriptionText) {
|
||||||
content.push(
|
content.push(
|
||||||
buildCollapsibleSection(
|
buildCollapsibleSection(
|
||||||
@@ -953,11 +1329,21 @@ function createDefinitionGlossary(
|
|||||||
);
|
);
|
||||||
}
|
}
|
||||||
|
|
||||||
if (fields.length > 0) {
|
const fieldItems: Array<Record<string, unknown>> = [];
|
||||||
const fieldItems: Array<Record<string, unknown>> = fields.map((f) => ({
|
if (statsLine) {
|
||||||
|
fieldItems.push({
|
||||||
|
tag: 'li',
|
||||||
|
style: { fontWeight: 'bold' },
|
||||||
|
content: statsLine,
|
||||||
|
});
|
||||||
|
}
|
||||||
|
fieldItems.push(
|
||||||
|
...fields.map((f) => ({
|
||||||
tag: 'li',
|
tag: 'li',
|
||||||
content: `${f.key}: ${f.value}`,
|
content: `${f.key}: ${f.value}`,
|
||||||
}));
|
})),
|
||||||
|
);
|
||||||
|
if (fieldItems.length > 0) {
|
||||||
content.push(
|
content.push(
|
||||||
buildCollapsibleSection(
|
buildCollapsibleSection(
|
||||||
'Character Information',
|
'Character Information',
|
||||||
@@ -1248,12 +1634,21 @@ async function fetchCharactersForMedia(
|
|||||||
node {
|
node {
|
||||||
id
|
id
|
||||||
description(asHtml: false)
|
description(asHtml: false)
|
||||||
|
gender
|
||||||
|
age
|
||||||
|
dateOfBirth {
|
||||||
|
month
|
||||||
|
day
|
||||||
|
}
|
||||||
|
bloodType
|
||||||
image {
|
image {
|
||||||
large
|
large
|
||||||
medium
|
medium
|
||||||
}
|
}
|
||||||
name {
|
name {
|
||||||
|
first
|
||||||
full
|
full
|
||||||
|
last
|
||||||
native
|
native
|
||||||
alternative
|
alternative
|
||||||
}
|
}
|
||||||
@@ -1287,7 +1682,9 @@ async function fetchCharactersForMedia(
|
|||||||
for (const edge of edges) {
|
for (const edge of edges) {
|
||||||
const node = edge?.node;
|
const node = edge?.node;
|
||||||
if (!node || typeof node.id !== 'number') continue;
|
if (!node || typeof node.id !== 'number') continue;
|
||||||
|
const firstNameHint = node.name?.first?.trim() || '';
|
||||||
const fullName = node.name?.full?.trim() || '';
|
const fullName = node.name?.full?.trim() || '';
|
||||||
|
const lastNameHint = node.name?.last?.trim() || '';
|
||||||
const nativeName = node.name?.native?.trim() || '';
|
const nativeName = node.name?.native?.trim() || '';
|
||||||
const alternativeNames = [
|
const alternativeNames = [
|
||||||
...new Set(
|
...new Set(
|
||||||
@@ -1297,7 +1694,7 @@ async function fetchCharactersForMedia(
|
|||||||
.filter((value) => value.length > 0),
|
.filter((value) => value.length > 0),
|
||||||
),
|
),
|
||||||
];
|
];
|
||||||
if (!fullName && !nativeName && alternativeNames.length === 0) continue;
|
if (!nativeName) continue;
|
||||||
const voiceActors: VoiceActorRecord[] = [];
|
const voiceActors: VoiceActorRecord[] = [];
|
||||||
for (const va of edge?.voiceActors ?? []) {
|
for (const va of edge?.voiceActors ?? []) {
|
||||||
if (!va || typeof va.id !== 'number') continue;
|
if (!va || typeof va.id !== 'number') continue;
|
||||||
@@ -1314,11 +1711,25 @@ async function fetchCharactersForMedia(
|
|||||||
characters.push({
|
characters.push({
|
||||||
id: node.id,
|
id: node.id,
|
||||||
role: mapRole(edge?.role),
|
role: mapRole(edge?.role),
|
||||||
|
firstNameHint,
|
||||||
fullName,
|
fullName,
|
||||||
|
lastNameHint,
|
||||||
nativeName,
|
nativeName,
|
||||||
alternativeNames,
|
alternativeNames,
|
||||||
|
bloodType: node.bloodType?.trim() || '',
|
||||||
|
birthday:
|
||||||
|
typeof node.dateOfBirth?.month === 'number' && typeof node.dateOfBirth?.day === 'number'
|
||||||
|
? [node.dateOfBirth.month, node.dateOfBirth.day]
|
||||||
|
: null,
|
||||||
description: node.description || '',
|
description: node.description || '',
|
||||||
imageUrl: node.image?.large || node.image?.medium || null,
|
imageUrl: node.image?.large || node.image?.medium || null,
|
||||||
|
age:
|
||||||
|
typeof node.age === 'string'
|
||||||
|
? node.age.trim()
|
||||||
|
: typeof node.age === 'number'
|
||||||
|
? String(node.age)
|
||||||
|
: '',
|
||||||
|
sex: node.gender?.trim() || '',
|
||||||
voiceActors,
|
voiceActors,
|
||||||
});
|
});
|
||||||
}
|
}
|
||||||
@@ -1400,9 +1811,9 @@ function buildSnapshotFromCharacters(
|
|||||||
) => boolean,
|
) => boolean,
|
||||||
): CharacterDictionarySnapshot {
|
): CharacterDictionarySnapshot {
|
||||||
const termEntries: CharacterDictionaryTermEntry[] = [];
|
const termEntries: CharacterDictionaryTermEntry[] = [];
|
||||||
const seen = new Set<string>();
|
|
||||||
|
|
||||||
for (const character of characters) {
|
for (const character of characters) {
|
||||||
|
const seenTerms = new Set<string>();
|
||||||
const imagePath = imagesByCharacterId.get(character.id)?.path ?? null;
|
const imagePath = imagesByCharacterId.get(character.id)?.path ?? null;
|
||||||
const vaImagePaths = new Map<number, string>();
|
const vaImagePaths = new Map<number, string>();
|
||||||
for (const va of character.voiceActors) {
|
for (const va of character.voiceActors) {
|
||||||
@@ -1417,11 +1828,21 @@ function buildSnapshotFromCharacters(
|
|||||||
getCollapsibleSectionOpenState,
|
getCollapsibleSectionOpenState,
|
||||||
);
|
);
|
||||||
const candidateTerms = buildNameTerms(character);
|
const candidateTerms = buildNameTerms(character);
|
||||||
|
const nameParts = splitJapaneseName(
|
||||||
|
character.nativeName,
|
||||||
|
character.firstNameHint,
|
||||||
|
character.lastNameHint,
|
||||||
|
);
|
||||||
|
const readings = generateNameReadings(
|
||||||
|
character.nativeName,
|
||||||
|
character.fullName,
|
||||||
|
character.firstNameHint,
|
||||||
|
character.lastNameHint,
|
||||||
|
);
|
||||||
for (const term of candidateTerms) {
|
for (const term of candidateTerms) {
|
||||||
const reading = buildReading(term);
|
if (seenTerms.has(term)) continue;
|
||||||
const dedupeKey = `${term}|${reading}|${character.role}`;
|
seenTerms.add(term);
|
||||||
if (seen.has(dedupeKey)) continue;
|
const reading = buildReadingForTerm(term, character, readings, nameParts);
|
||||||
seen.add(dedupeKey);
|
|
||||||
termEntries.push(buildTermEntry(term, reading, character.role, glossary));
|
termEntries.push(buildTermEntry(term, reading, character.role, glossary));
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@@ -1560,7 +1981,10 @@ function buildMergedRevision(mediaIds: number[], snapshots: CharacterDictionaryS
|
|||||||
}
|
}
|
||||||
|
|
||||||
export function createCharacterDictionaryRuntimeService(deps: CharacterDictionaryRuntimeDeps): {
|
export function createCharacterDictionaryRuntimeService(deps: CharacterDictionaryRuntimeDeps): {
|
||||||
getOrCreateCurrentSnapshot: (targetPath?: string) => Promise<CharacterDictionarySnapshotResult>;
|
getOrCreateCurrentSnapshot: (
|
||||||
|
targetPath?: string,
|
||||||
|
progress?: CharacterDictionarySnapshotProgressCallbacks,
|
||||||
|
) => Promise<CharacterDictionarySnapshotResult>;
|
||||||
buildMergedDictionary: (mediaIds: number[]) => Promise<MergedCharacterDictionaryBuildResult>;
|
buildMergedDictionary: (mediaIds: number[]) => Promise<MergedCharacterDictionaryBuildResult>;
|
||||||
generateForCurrentMedia: (
|
generateForCurrentMedia: (
|
||||||
targetPath?: string,
|
targetPath?: string,
|
||||||
@@ -1606,6 +2030,7 @@ export function createCharacterDictionaryRuntimeService(deps: CharacterDictionar
|
|||||||
mediaId: number,
|
mediaId: number,
|
||||||
mediaTitleHint?: string,
|
mediaTitleHint?: string,
|
||||||
beforeRequest?: () => Promise<void>,
|
beforeRequest?: () => Promise<void>,
|
||||||
|
progress?: CharacterDictionarySnapshotProgressCallbacks,
|
||||||
): Promise<CharacterDictionarySnapshotResult> => {
|
): Promise<CharacterDictionarySnapshotResult> => {
|
||||||
const snapshotPath = getSnapshotPath(outputDir, mediaId);
|
const snapshotPath = getSnapshotPath(outputDir, mediaId);
|
||||||
const cachedSnapshot = readSnapshot(snapshotPath);
|
const cachedSnapshot = readSnapshot(snapshotPath);
|
||||||
@@ -1620,6 +2045,10 @@ export function createCharacterDictionaryRuntimeService(deps: CharacterDictionar
|
|||||||
};
|
};
|
||||||
}
|
}
|
||||||
|
|
||||||
|
progress?.onGenerating?.({
|
||||||
|
mediaId,
|
||||||
|
mediaTitle: mediaTitleHint || `AniList ${mediaId}`,
|
||||||
|
});
|
||||||
deps.logInfo?.(`[dictionary] snapshot miss for AniList ${mediaId}, fetching characters`);
|
deps.logInfo?.(`[dictionary] snapshot miss for AniList ${mediaId}, fetching characters`);
|
||||||
|
|
||||||
const { mediaTitle: fetchedMediaTitle, characters } = await fetchCharactersForMedia(
|
const { mediaTitle: fetchedMediaTitle, characters } = await fetchCharactersForMedia(
|
||||||
@@ -1700,7 +2129,10 @@ export function createCharacterDictionaryRuntimeService(deps: CharacterDictionar
|
|||||||
};
|
};
|
||||||
|
|
||||||
return {
|
return {
|
||||||
getOrCreateCurrentSnapshot: async (targetPath?: string) => {
|
getOrCreateCurrentSnapshot: async (
|
||||||
|
targetPath?: string,
|
||||||
|
progress?: CharacterDictionarySnapshotProgressCallbacks,
|
||||||
|
) => {
|
||||||
let hasAniListRequest = false;
|
let hasAniListRequest = false;
|
||||||
const waitForAniListRequestSlot = async (): Promise<void> => {
|
const waitForAniListRequestSlot = async (): Promise<void> => {
|
||||||
if (!hasAniListRequest) {
|
if (!hasAniListRequest) {
|
||||||
@@ -1710,7 +2142,16 @@ export function createCharacterDictionaryRuntimeService(deps: CharacterDictionar
|
|||||||
await sleepMs(ANILIST_REQUEST_DELAY_MS);
|
await sleepMs(ANILIST_REQUEST_DELAY_MS);
|
||||||
};
|
};
|
||||||
const resolvedMedia = await resolveCurrentMedia(targetPath, waitForAniListRequestSlot);
|
const resolvedMedia = await resolveCurrentMedia(targetPath, waitForAniListRequestSlot);
|
||||||
return getOrCreateSnapshot(resolvedMedia.id, resolvedMedia.title, waitForAniListRequestSlot);
|
progress?.onChecking?.({
|
||||||
|
mediaId: resolvedMedia.id,
|
||||||
|
mediaTitle: resolvedMedia.title,
|
||||||
|
});
|
||||||
|
return getOrCreateSnapshot(
|
||||||
|
resolvedMedia.id,
|
||||||
|
resolvedMedia.title,
|
||||||
|
waitForAniListRequestSlot,
|
||||||
|
progress,
|
||||||
|
);
|
||||||
},
|
},
|
||||||
buildMergedDictionary: async (mediaIds: number[]) => {
|
buildMergedDictionary: async (mediaIds: number[]) => {
|
||||||
const normalizedMediaIds = mediaIds
|
const normalizedMediaIds = mediaIds
|
||||||
|
|||||||
@@ -0,0 +1,92 @@
|
|||||||
|
import assert from 'node:assert/strict';
|
||||||
|
import test from 'node:test';
|
||||||
|
import {
|
||||||
|
notifyCharacterDictionaryAutoSyncStatus,
|
||||||
|
type CharacterDictionaryAutoSyncNotificationEvent,
|
||||||
|
} from './character-dictionary-auto-sync-notifications';
|
||||||
|
|
||||||
|
function makeEvent(
|
||||||
|
phase: CharacterDictionaryAutoSyncNotificationEvent['phase'],
|
||||||
|
message: string,
|
||||||
|
): CharacterDictionaryAutoSyncNotificationEvent {
|
||||||
|
return {
|
||||||
|
phase,
|
||||||
|
mediaId: 101291,
|
||||||
|
mediaTitle: 'Rascal Does Not Dream of Bunny Girl Senpai',
|
||||||
|
message,
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
test('auto sync notifications send osd updates for progress phases', () => {
|
||||||
|
const calls: string[] = [];
|
||||||
|
|
||||||
|
notifyCharacterDictionaryAutoSyncStatus(makeEvent('checking', 'checking'), {
|
||||||
|
getNotificationType: () => 'osd',
|
||||||
|
showOsd: (message) => calls.push(`osd:${message}`),
|
||||||
|
showDesktopNotification: (title, options) =>
|
||||||
|
calls.push(`desktop:${title}:${options.body ?? ''}`),
|
||||||
|
});
|
||||||
|
notifyCharacterDictionaryAutoSyncStatus(makeEvent('generating', 'generating'), {
|
||||||
|
getNotificationType: () => 'osd',
|
||||||
|
showOsd: (message) => calls.push(`osd:${message}`),
|
||||||
|
showDesktopNotification: (title, options) =>
|
||||||
|
calls.push(`desktop:${title}:${options.body ?? ''}`),
|
||||||
|
});
|
||||||
|
notifyCharacterDictionaryAutoSyncStatus(makeEvent('syncing', 'syncing'), {
|
||||||
|
getNotificationType: () => 'osd',
|
||||||
|
showOsd: (message) => calls.push(`osd:${message}`),
|
||||||
|
showDesktopNotification: (title, options) =>
|
||||||
|
calls.push(`desktop:${title}:${options.body ?? ''}`),
|
||||||
|
});
|
||||||
|
notifyCharacterDictionaryAutoSyncStatus(makeEvent('importing', 'importing'), {
|
||||||
|
getNotificationType: () => 'osd',
|
||||||
|
showOsd: (message) => calls.push(`osd:${message}`),
|
||||||
|
showDesktopNotification: (title, options) =>
|
||||||
|
calls.push(`desktop:${title}:${options.body ?? ''}`),
|
||||||
|
});
|
||||||
|
notifyCharacterDictionaryAutoSyncStatus(makeEvent('ready', 'ready'), {
|
||||||
|
getNotificationType: () => 'osd',
|
||||||
|
showOsd: (message) => calls.push(`osd:${message}`),
|
||||||
|
showDesktopNotification: (title, options) =>
|
||||||
|
calls.push(`desktop:${title}:${options.body ?? ''}`),
|
||||||
|
});
|
||||||
|
|
||||||
|
assert.deepEqual(calls, [
|
||||||
|
'osd:checking',
|
||||||
|
'osd:generating',
|
||||||
|
'osd:syncing',
|
||||||
|
'osd:importing',
|
||||||
|
'osd:ready',
|
||||||
|
]);
|
||||||
|
});
|
||||||
|
|
||||||
|
test('auto sync notifications never send desktop notifications', () => {
|
||||||
|
const calls: string[] = [];
|
||||||
|
|
||||||
|
notifyCharacterDictionaryAutoSyncStatus(makeEvent('syncing', 'syncing'), {
|
||||||
|
getNotificationType: () => 'both',
|
||||||
|
showOsd: (message) => calls.push(`osd:${message}`),
|
||||||
|
showDesktopNotification: (title, options) =>
|
||||||
|
calls.push(`desktop:${title}:${options.body ?? ''}`),
|
||||||
|
});
|
||||||
|
notifyCharacterDictionaryAutoSyncStatus(makeEvent('importing', 'importing'), {
|
||||||
|
getNotificationType: () => 'both',
|
||||||
|
showOsd: (message) => calls.push(`osd:${message}`),
|
||||||
|
showDesktopNotification: (title, options) =>
|
||||||
|
calls.push(`desktop:${title}:${options.body ?? ''}`),
|
||||||
|
});
|
||||||
|
notifyCharacterDictionaryAutoSyncStatus(makeEvent('ready', 'ready'), {
|
||||||
|
getNotificationType: () => 'both',
|
||||||
|
showOsd: (message) => calls.push(`osd:${message}`),
|
||||||
|
showDesktopNotification: (title, options) =>
|
||||||
|
calls.push(`desktop:${title}:${options.body ?? ''}`),
|
||||||
|
});
|
||||||
|
notifyCharacterDictionaryAutoSyncStatus(makeEvent('failed', 'failed'), {
|
||||||
|
getNotificationType: () => 'both',
|
||||||
|
showOsd: (message) => calls.push(`osd:${message}`),
|
||||||
|
showDesktopNotification: (title, options) =>
|
||||||
|
calls.push(`desktop:${title}:${options.body ?? ''}`),
|
||||||
|
});
|
||||||
|
|
||||||
|
assert.deepEqual(calls, ['osd:syncing', 'osd:importing', 'osd:ready', 'osd:failed']);
|
||||||
|
});
|
||||||
@@ -0,0 +1,34 @@
|
|||||||
|
import type { CharacterDictionaryAutoSyncStatusEvent } from './character-dictionary-auto-sync';
|
||||||
|
import type { StartupOsdSequencerCharacterDictionaryEvent } from './startup-osd-sequencer';
|
||||||
|
|
||||||
|
export type CharacterDictionaryAutoSyncNotificationEvent = CharacterDictionaryAutoSyncStatusEvent;
|
||||||
|
|
||||||
|
export interface CharacterDictionaryAutoSyncNotificationDeps {
|
||||||
|
getNotificationType: () => 'osd' | 'system' | 'both' | 'none' | undefined;
|
||||||
|
showOsd: (message: string) => void;
|
||||||
|
showDesktopNotification: (title: string, options: { body?: string }) => void;
|
||||||
|
startupOsdSequencer?: {
|
||||||
|
notifyCharacterDictionaryStatus: (event: StartupOsdSequencerCharacterDictionaryEvent) => void;
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
function shouldShowOsd(type: 'osd' | 'system' | 'both' | 'none' | undefined): boolean {
|
||||||
|
return type !== 'none';
|
||||||
|
}
|
||||||
|
|
||||||
|
export function notifyCharacterDictionaryAutoSyncStatus(
|
||||||
|
event: CharacterDictionaryAutoSyncNotificationEvent,
|
||||||
|
deps: CharacterDictionaryAutoSyncNotificationDeps,
|
||||||
|
): void {
|
||||||
|
const type = deps.getNotificationType();
|
||||||
|
if (shouldShowOsd(type)) {
|
||||||
|
if (deps.startupOsdSequencer) {
|
||||||
|
deps.startupOsdSequencer.notifyCharacterDictionaryStatus({
|
||||||
|
phase: event.phase,
|
||||||
|
message: event.message,
|
||||||
|
});
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
deps.showOsd(event.message);
|
||||||
|
}
|
||||||
|
}
|
||||||
@@ -9,6 +9,14 @@ function makeTempDir(): string {
|
|||||||
return fs.mkdtempSync(path.join(os.tmpdir(), 'subminer-char-dict-auto-sync-'));
|
return fs.mkdtempSync(path.join(os.tmpdir(), 'subminer-char-dict-auto-sync-'));
|
||||||
}
|
}
|
||||||
|
|
||||||
|
function createDeferred<T>(): { promise: Promise<T>; resolve: (value: T) => void } {
|
||||||
|
let resolve!: (value: T) => void;
|
||||||
|
const promise = new Promise<T>((nextResolve) => {
|
||||||
|
resolve = nextResolve;
|
||||||
|
});
|
||||||
|
return { promise, resolve };
|
||||||
|
}
|
||||||
|
|
||||||
test('auto sync imports merged dictionary and persists MRU state', async () => {
|
test('auto sync imports merged dictionary and persists MRU state', async () => {
|
||||||
const userDataPath = makeTempDir();
|
const userDataPath = makeTempDir();
|
||||||
const imported: string[] = [];
|
const imported: string[] = [];
|
||||||
@@ -267,3 +275,373 @@ test('auto sync evicts least recently used media from merged set', async () => {
|
|||||||
};
|
};
|
||||||
assert.deepEqual(state.activeMediaIds, [4, 3, 2]);
|
assert.deepEqual(state.activeMediaIds, [4, 3, 2]);
|
||||||
});
|
});
|
||||||
|
|
||||||
|
test('auto sync invokes completion callback after successful sync', async () => {
|
||||||
|
const userDataPath = makeTempDir();
|
||||||
|
const completions: Array<{ mediaId: number; mediaTitle: string; changed: boolean }> = [];
|
||||||
|
let importedRevision: string | null = null;
|
||||||
|
|
||||||
|
const runtime = createCharacterDictionaryAutoSyncRuntimeService({
|
||||||
|
userDataPath,
|
||||||
|
getConfig: () => ({
|
||||||
|
enabled: true,
|
||||||
|
maxLoaded: 3,
|
||||||
|
profileScope: 'all',
|
||||||
|
}),
|
||||||
|
getOrCreateCurrentSnapshot: async () => ({
|
||||||
|
mediaId: 101291,
|
||||||
|
mediaTitle: 'Rascal Does Not Dream of Bunny Girl Senpai',
|
||||||
|
entryCount: 2560,
|
||||||
|
fromCache: false,
|
||||||
|
updatedAt: 1000,
|
||||||
|
}),
|
||||||
|
buildMergedDictionary: async () => ({
|
||||||
|
zipPath: '/tmp/merged.zip',
|
||||||
|
revision: 'rev-101291',
|
||||||
|
dictionaryTitle: 'SubMiner Character Dictionary',
|
||||||
|
entryCount: 2560,
|
||||||
|
}),
|
||||||
|
getYomitanDictionaryInfo: async () =>
|
||||||
|
importedRevision
|
||||||
|
? [{ title: 'SubMiner Character Dictionary', revision: importedRevision }]
|
||||||
|
: [],
|
||||||
|
importYomitanDictionary: async () => {
|
||||||
|
importedRevision = 'rev-101291';
|
||||||
|
return true;
|
||||||
|
},
|
||||||
|
deleteYomitanDictionary: async () => true,
|
||||||
|
upsertYomitanDictionarySettings: async () => true,
|
||||||
|
now: () => 1000,
|
||||||
|
onSyncComplete: (completion) => {
|
||||||
|
completions.push(completion);
|
||||||
|
},
|
||||||
|
});
|
||||||
|
|
||||||
|
await runtime.runSyncNow();
|
||||||
|
|
||||||
|
assert.deepEqual(completions, [
|
||||||
|
{
|
||||||
|
mediaId: 101291,
|
||||||
|
mediaTitle: 'Rascal Does Not Dream of Bunny Girl Senpai',
|
||||||
|
changed: true,
|
||||||
|
},
|
||||||
|
]);
|
||||||
|
});
|
||||||
|
|
||||||
|
test('auto sync emits progress events for start import and completion', async () => {
|
||||||
|
const userDataPath = makeTempDir();
|
||||||
|
const events: Array<{
|
||||||
|
phase: 'checking' | 'generating' | 'syncing' | 'building' | 'importing' | 'ready' | 'failed';
|
||||||
|
mediaId?: number;
|
||||||
|
mediaTitle?: string;
|
||||||
|
message: string;
|
||||||
|
changed?: boolean;
|
||||||
|
}> = [];
|
||||||
|
let importedRevision: string | null = null;
|
||||||
|
|
||||||
|
const runtime = createCharacterDictionaryAutoSyncRuntimeService({
|
||||||
|
userDataPath,
|
||||||
|
getConfig: () => ({
|
||||||
|
enabled: true,
|
||||||
|
maxLoaded: 3,
|
||||||
|
profileScope: 'all',
|
||||||
|
}),
|
||||||
|
getOrCreateCurrentSnapshot: async (_targetPath, progress) => {
|
||||||
|
progress?.onChecking?.({
|
||||||
|
mediaId: 101291,
|
||||||
|
mediaTitle: 'Rascal Does Not Dream of Bunny Girl Senpai',
|
||||||
|
});
|
||||||
|
progress?.onGenerating?.({
|
||||||
|
mediaId: 101291,
|
||||||
|
mediaTitle: 'Rascal Does Not Dream of Bunny Girl Senpai',
|
||||||
|
});
|
||||||
|
return {
|
||||||
|
mediaId: 101291,
|
||||||
|
mediaTitle: 'Rascal Does Not Dream of Bunny Girl Senpai',
|
||||||
|
entryCount: 2560,
|
||||||
|
fromCache: false,
|
||||||
|
updatedAt: 1000,
|
||||||
|
};
|
||||||
|
},
|
||||||
|
buildMergedDictionary: async () => ({
|
||||||
|
zipPath: '/tmp/merged.zip',
|
||||||
|
revision: 'rev-101291',
|
||||||
|
dictionaryTitle: 'SubMiner Character Dictionary',
|
||||||
|
entryCount: 2560,
|
||||||
|
}),
|
||||||
|
getYomitanDictionaryInfo: async () =>
|
||||||
|
importedRevision
|
||||||
|
? [{ title: 'SubMiner Character Dictionary', revision: importedRevision }]
|
||||||
|
: [],
|
||||||
|
importYomitanDictionary: async () => {
|
||||||
|
importedRevision = 'rev-101291';
|
||||||
|
return true;
|
||||||
|
},
|
||||||
|
deleteYomitanDictionary: async () => true,
|
||||||
|
upsertYomitanDictionarySettings: async () => true,
|
||||||
|
now: () => 1000,
|
||||||
|
onSyncStatus: (event) => {
|
||||||
|
events.push(event);
|
||||||
|
},
|
||||||
|
});
|
||||||
|
|
||||||
|
await runtime.runSyncNow();
|
||||||
|
|
||||||
|
assert.deepEqual(events, [
|
||||||
|
{
|
||||||
|
phase: 'checking',
|
||||||
|
mediaId: 101291,
|
||||||
|
mediaTitle: 'Rascal Does Not Dream of Bunny Girl Senpai',
|
||||||
|
message: 'Checking character dictionary for Rascal Does Not Dream of Bunny Girl Senpai...',
|
||||||
|
},
|
||||||
|
{
|
||||||
|
phase: 'generating',
|
||||||
|
mediaId: 101291,
|
||||||
|
mediaTitle: 'Rascal Does Not Dream of Bunny Girl Senpai',
|
||||||
|
message: 'Generating character dictionary for Rascal Does Not Dream of Bunny Girl Senpai...',
|
||||||
|
},
|
||||||
|
{
|
||||||
|
phase: 'syncing',
|
||||||
|
mediaId: 101291,
|
||||||
|
mediaTitle: 'Rascal Does Not Dream of Bunny Girl Senpai',
|
||||||
|
message: 'Updating character dictionary for Rascal Does Not Dream of Bunny Girl Senpai...',
|
||||||
|
},
|
||||||
|
{
|
||||||
|
phase: 'building',
|
||||||
|
mediaId: 101291,
|
||||||
|
mediaTitle: 'Rascal Does Not Dream of Bunny Girl Senpai',
|
||||||
|
message: 'Building character dictionary for Rascal Does Not Dream of Bunny Girl Senpai...',
|
||||||
|
},
|
||||||
|
{
|
||||||
|
phase: 'importing',
|
||||||
|
mediaId: 101291,
|
||||||
|
mediaTitle: 'Rascal Does Not Dream of Bunny Girl Senpai',
|
||||||
|
message: 'Importing character dictionary for Rascal Does Not Dream of Bunny Girl Senpai...',
|
||||||
|
},
|
||||||
|
{
|
||||||
|
phase: 'ready',
|
||||||
|
mediaId: 101291,
|
||||||
|
mediaTitle: 'Rascal Does Not Dream of Bunny Girl Senpai',
|
||||||
|
message: 'Character dictionary ready for Rascal Does Not Dream of Bunny Girl Senpai',
|
||||||
|
changed: true,
|
||||||
|
},
|
||||||
|
]);
|
||||||
|
});
|
||||||
|
|
||||||
|
test('auto sync emits checking before snapshot resolves and skips generating on cache hit', async () => {
|
||||||
|
const userDataPath = makeTempDir();
|
||||||
|
const events: Array<{
|
||||||
|
phase: 'checking' | 'generating' | 'syncing' | 'building' | 'importing' | 'ready' | 'failed';
|
||||||
|
mediaId?: number;
|
||||||
|
mediaTitle?: string;
|
||||||
|
message: string;
|
||||||
|
changed?: boolean;
|
||||||
|
}> = [];
|
||||||
|
const snapshotDeferred = createDeferred<{
|
||||||
|
mediaId: number;
|
||||||
|
mediaTitle: string;
|
||||||
|
entryCount: number;
|
||||||
|
fromCache: boolean;
|
||||||
|
updatedAt: number;
|
||||||
|
}>();
|
||||||
|
let importedRevision: string | null = null;
|
||||||
|
|
||||||
|
const runtime = createCharacterDictionaryAutoSyncRuntimeService({
|
||||||
|
userDataPath,
|
||||||
|
getConfig: () => ({
|
||||||
|
enabled: true,
|
||||||
|
maxLoaded: 3,
|
||||||
|
profileScope: 'all',
|
||||||
|
}),
|
||||||
|
getOrCreateCurrentSnapshot: async (_targetPath, progress) => {
|
||||||
|
progress?.onChecking?.({
|
||||||
|
mediaId: 101291,
|
||||||
|
mediaTitle: 'Rascal Does Not Dream of Bunny Girl Senpai',
|
||||||
|
});
|
||||||
|
return await snapshotDeferred.promise;
|
||||||
|
},
|
||||||
|
buildMergedDictionary: async () => ({
|
||||||
|
zipPath: '/tmp/merged.zip',
|
||||||
|
revision: 'rev-101291',
|
||||||
|
dictionaryTitle: 'SubMiner Character Dictionary',
|
||||||
|
entryCount: 2560,
|
||||||
|
}),
|
||||||
|
getYomitanDictionaryInfo: async () =>
|
||||||
|
importedRevision
|
||||||
|
? [{ title: 'SubMiner Character Dictionary', revision: importedRevision }]
|
||||||
|
: [],
|
||||||
|
importYomitanDictionary: async () => {
|
||||||
|
importedRevision = 'rev-101291';
|
||||||
|
return true;
|
||||||
|
},
|
||||||
|
deleteYomitanDictionary: async () => true,
|
||||||
|
upsertYomitanDictionarySettings: async () => true,
|
||||||
|
now: () => 1000,
|
||||||
|
onSyncStatus: (event) => {
|
||||||
|
events.push(event);
|
||||||
|
},
|
||||||
|
});
|
||||||
|
|
||||||
|
const syncPromise = runtime.runSyncNow();
|
||||||
|
await Promise.resolve();
|
||||||
|
|
||||||
|
assert.deepEqual(events, [
|
||||||
|
{
|
||||||
|
phase: 'checking',
|
||||||
|
mediaId: 101291,
|
||||||
|
mediaTitle: 'Rascal Does Not Dream of Bunny Girl Senpai',
|
||||||
|
message: 'Checking character dictionary for Rascal Does Not Dream of Bunny Girl Senpai...',
|
||||||
|
},
|
||||||
|
]);
|
||||||
|
|
||||||
|
snapshotDeferred.resolve({
|
||||||
|
mediaId: 101291,
|
||||||
|
mediaTitle: 'Rascal Does Not Dream of Bunny Girl Senpai',
|
||||||
|
entryCount: 2560,
|
||||||
|
fromCache: true,
|
||||||
|
updatedAt: 1000,
|
||||||
|
});
|
||||||
|
await syncPromise;
|
||||||
|
|
||||||
|
assert.equal(
|
||||||
|
events.some((event) => event.phase === 'generating'),
|
||||||
|
false,
|
||||||
|
);
|
||||||
|
});
|
||||||
|
|
||||||
|
test('auto sync emits building while merged dictionary generation is in flight', async () => {
|
||||||
|
const userDataPath = makeTempDir();
|
||||||
|
const events: Array<{
|
||||||
|
phase: 'checking' | 'generating' | 'building' | 'syncing' | 'importing' | 'ready' | 'failed';
|
||||||
|
mediaId?: number;
|
||||||
|
mediaTitle?: string;
|
||||||
|
message: string;
|
||||||
|
changed?: boolean;
|
||||||
|
}> = [];
|
||||||
|
const buildDeferred = createDeferred<{
|
||||||
|
zipPath: string;
|
||||||
|
revision: string;
|
||||||
|
dictionaryTitle: string;
|
||||||
|
entryCount: number;
|
||||||
|
}>();
|
||||||
|
let importedRevision: string | null = null;
|
||||||
|
|
||||||
|
const runtime = createCharacterDictionaryAutoSyncRuntimeService({
|
||||||
|
userDataPath,
|
||||||
|
getConfig: () => ({
|
||||||
|
enabled: true,
|
||||||
|
maxLoaded: 3,
|
||||||
|
profileScope: 'all',
|
||||||
|
}),
|
||||||
|
getOrCreateCurrentSnapshot: async (_targetPath, progress) => {
|
||||||
|
progress?.onChecking?.({
|
||||||
|
mediaId: 101291,
|
||||||
|
mediaTitle: 'Rascal Does Not Dream of Bunny Girl Senpai',
|
||||||
|
});
|
||||||
|
return {
|
||||||
|
mediaId: 101291,
|
||||||
|
mediaTitle: 'Rascal Does Not Dream of Bunny Girl Senpai',
|
||||||
|
entryCount: 2560,
|
||||||
|
fromCache: true,
|
||||||
|
updatedAt: 1000,
|
||||||
|
};
|
||||||
|
},
|
||||||
|
buildMergedDictionary: async () => await buildDeferred.promise,
|
||||||
|
getYomitanDictionaryInfo: async () =>
|
||||||
|
importedRevision
|
||||||
|
? [{ title: 'SubMiner Character Dictionary', revision: importedRevision }]
|
||||||
|
: [],
|
||||||
|
importYomitanDictionary: async () => {
|
||||||
|
importedRevision = 'rev-101291';
|
||||||
|
return true;
|
||||||
|
},
|
||||||
|
deleteYomitanDictionary: async () => true,
|
||||||
|
upsertYomitanDictionarySettings: async () => true,
|
||||||
|
now: () => 1000,
|
||||||
|
onSyncStatus: (event) => {
|
||||||
|
events.push(event);
|
||||||
|
},
|
||||||
|
});
|
||||||
|
|
||||||
|
const syncPromise = runtime.runSyncNow();
|
||||||
|
await Promise.resolve();
|
||||||
|
|
||||||
|
assert.equal(
|
||||||
|
events.some((event) => event.phase === 'building'),
|
||||||
|
true,
|
||||||
|
);
|
||||||
|
|
||||||
|
buildDeferred.resolve({
|
||||||
|
zipPath: '/tmp/merged.zip',
|
||||||
|
revision: 'rev-101291',
|
||||||
|
dictionaryTitle: 'SubMiner Character Dictionary',
|
||||||
|
entryCount: 2560,
|
||||||
|
});
|
||||||
|
await syncPromise;
|
||||||
|
});
|
||||||
|
|
||||||
|
test('auto sync waits for tokenization-ready gate before Yomitan mutations', async () => {
|
||||||
|
const userDataPath = makeTempDir();
|
||||||
|
const gate = (() => {
|
||||||
|
let resolve!: () => void;
|
||||||
|
const promise = new Promise<void>((nextResolve) => {
|
||||||
|
resolve = nextResolve;
|
||||||
|
});
|
||||||
|
return { promise, resolve };
|
||||||
|
})();
|
||||||
|
const calls: string[] = [];
|
||||||
|
|
||||||
|
const runtime = createCharacterDictionaryAutoSyncRuntimeService({
|
||||||
|
userDataPath,
|
||||||
|
getConfig: () => ({
|
||||||
|
enabled: true,
|
||||||
|
maxLoaded: 3,
|
||||||
|
profileScope: 'all',
|
||||||
|
}),
|
||||||
|
getOrCreateCurrentSnapshot: async () => ({
|
||||||
|
mediaId: 101291,
|
||||||
|
mediaTitle: 'Rascal Does Not Dream of Bunny Girl Senpai',
|
||||||
|
entryCount: 2560,
|
||||||
|
fromCache: false,
|
||||||
|
updatedAt: 1000,
|
||||||
|
}),
|
||||||
|
buildMergedDictionary: async () => {
|
||||||
|
calls.push('build');
|
||||||
|
return {
|
||||||
|
zipPath: '/tmp/merged.zip',
|
||||||
|
revision: 'rev-101291',
|
||||||
|
dictionaryTitle: 'SubMiner Character Dictionary',
|
||||||
|
entryCount: 2560,
|
||||||
|
};
|
||||||
|
},
|
||||||
|
waitForYomitanMutationReady: async () => {
|
||||||
|
calls.push('wait');
|
||||||
|
await gate.promise;
|
||||||
|
},
|
||||||
|
getYomitanDictionaryInfo: async () => {
|
||||||
|
calls.push('info');
|
||||||
|
return [];
|
||||||
|
},
|
||||||
|
importYomitanDictionary: async () => {
|
||||||
|
calls.push('import');
|
||||||
|
return true;
|
||||||
|
},
|
||||||
|
deleteYomitanDictionary: async () => true,
|
||||||
|
upsertYomitanDictionarySettings: async () => {
|
||||||
|
calls.push('settings');
|
||||||
|
return true;
|
||||||
|
},
|
||||||
|
now: () => 1000,
|
||||||
|
});
|
||||||
|
|
||||||
|
const syncPromise = runtime.runSyncNow();
|
||||||
|
await Promise.resolve();
|
||||||
|
await Promise.resolve();
|
||||||
|
|
||||||
|
assert.deepEqual(calls, ['build', 'wait']);
|
||||||
|
|
||||||
|
gate.resolve();
|
||||||
|
await syncPromise;
|
||||||
|
|
||||||
|
assert.deepEqual(calls, ['build', 'wait', 'info', 'import', 'settings']);
|
||||||
|
});
|
||||||
|
|||||||
@@ -2,6 +2,7 @@ import * as fs from 'fs';
|
|||||||
import * as path from 'path';
|
import * as path from 'path';
|
||||||
import type { AnilistCharacterDictionaryProfileScope } from '../../types';
|
import type { AnilistCharacterDictionaryProfileScope } from '../../types';
|
||||||
import type {
|
import type {
|
||||||
|
CharacterDictionarySnapshotProgressCallbacks,
|
||||||
CharacterDictionarySnapshotResult,
|
CharacterDictionarySnapshotResult,
|
||||||
MergedCharacterDictionaryBuildResult,
|
MergedCharacterDictionaryBuildResult,
|
||||||
} from '../character-dictionary-runtime';
|
} from '../character-dictionary-runtime';
|
||||||
@@ -23,11 +24,23 @@ export interface CharacterDictionaryAutoSyncConfig {
|
|||||||
profileScope: AnilistCharacterDictionaryProfileScope;
|
profileScope: AnilistCharacterDictionaryProfileScope;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
export interface CharacterDictionaryAutoSyncStatusEvent {
|
||||||
|
phase: 'checking' | 'generating' | 'syncing' | 'building' | 'importing' | 'ready' | 'failed';
|
||||||
|
mediaId?: number;
|
||||||
|
mediaTitle?: string;
|
||||||
|
message: string;
|
||||||
|
changed?: boolean;
|
||||||
|
}
|
||||||
|
|
||||||
export interface CharacterDictionaryAutoSyncRuntimeDeps {
|
export interface CharacterDictionaryAutoSyncRuntimeDeps {
|
||||||
userDataPath: string;
|
userDataPath: string;
|
||||||
getConfig: () => CharacterDictionaryAutoSyncConfig;
|
getConfig: () => CharacterDictionaryAutoSyncConfig;
|
||||||
getOrCreateCurrentSnapshot: (targetPath?: string) => Promise<CharacterDictionarySnapshotResult>;
|
getOrCreateCurrentSnapshot: (
|
||||||
|
targetPath?: string,
|
||||||
|
progress?: CharacterDictionarySnapshotProgressCallbacks,
|
||||||
|
) => Promise<CharacterDictionarySnapshotResult>;
|
||||||
buildMergedDictionary: (mediaIds: number[]) => Promise<MergedCharacterDictionaryBuildResult>;
|
buildMergedDictionary: (mediaIds: number[]) => Promise<MergedCharacterDictionaryBuildResult>;
|
||||||
|
waitForYomitanMutationReady?: () => Promise<void>;
|
||||||
getYomitanDictionaryInfo: () => Promise<AutoSyncDictionaryInfo[]>;
|
getYomitanDictionaryInfo: () => Promise<AutoSyncDictionaryInfo[]>;
|
||||||
importYomitanDictionary: (zipPath: string) => Promise<boolean>;
|
importYomitanDictionary: (zipPath: string) => Promise<boolean>;
|
||||||
deleteYomitanDictionary: (dictionaryTitle: string) => Promise<boolean>;
|
deleteYomitanDictionary: (dictionaryTitle: string) => Promise<boolean>;
|
||||||
@@ -41,6 +54,8 @@ export interface CharacterDictionaryAutoSyncRuntimeDeps {
|
|||||||
operationTimeoutMs?: number;
|
operationTimeoutMs?: number;
|
||||||
logInfo?: (message: string) => void;
|
logInfo?: (message: string) => void;
|
||||||
logWarn?: (message: string) => void;
|
logWarn?: (message: string) => void;
|
||||||
|
onSyncStatus?: (event: CharacterDictionaryAutoSyncStatusEvent) => void;
|
||||||
|
onSyncComplete?: (result: { mediaId: number; mediaTitle: string; changed: boolean }) => void;
|
||||||
}
|
}
|
||||||
|
|
||||||
function ensureDir(dirPath: string): void {
|
function ensureDir(dirPath: string): void {
|
||||||
@@ -92,6 +107,37 @@ function arraysEqual(left: number[], right: number[]): boolean {
|
|||||||
return true;
|
return true;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
function buildSyncingMessage(mediaTitle: string): string {
|
||||||
|
return `Updating character dictionary for ${mediaTitle}...`;
|
||||||
|
}
|
||||||
|
|
||||||
|
function buildCheckingMessage(mediaTitle: string): string {
|
||||||
|
return `Checking character dictionary for ${mediaTitle}...`;
|
||||||
|
}
|
||||||
|
|
||||||
|
function buildGeneratingMessage(mediaTitle: string): string {
|
||||||
|
return `Generating character dictionary for ${mediaTitle}...`;
|
||||||
|
}
|
||||||
|
|
||||||
|
function buildImportingMessage(mediaTitle: string): string {
|
||||||
|
return `Importing character dictionary for ${mediaTitle}...`;
|
||||||
|
}
|
||||||
|
|
||||||
|
function buildBuildingMessage(mediaTitle: string): string {
|
||||||
|
return `Building character dictionary for ${mediaTitle}...`;
|
||||||
|
}
|
||||||
|
|
||||||
|
function buildReadyMessage(mediaTitle: string): string {
|
||||||
|
return `Character dictionary ready for ${mediaTitle}`;
|
||||||
|
}
|
||||||
|
|
||||||
|
function buildFailedMessage(mediaTitle: string | null, errorMessage: string): string {
|
||||||
|
if (mediaTitle) {
|
||||||
|
return `Character dictionary sync failed for ${mediaTitle}: ${errorMessage}`;
|
||||||
|
}
|
||||||
|
return `Character dictionary sync failed: ${errorMessage}`;
|
||||||
|
}
|
||||||
|
|
||||||
export function createCharacterDictionaryAutoSyncRuntimeService(
|
export function createCharacterDictionaryAutoSyncRuntimeService(
|
||||||
deps: CharacterDictionaryAutoSyncRuntimeDeps,
|
deps: CharacterDictionaryAutoSyncRuntimeDeps,
|
||||||
): {
|
): {
|
||||||
@@ -133,8 +179,41 @@ export function createCharacterDictionaryAutoSyncRuntimeService(
|
|||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
let currentMediaId: number | undefined;
|
||||||
|
let currentMediaTitle: string | null = null;
|
||||||
|
|
||||||
|
try {
|
||||||
deps.logInfo?.('[dictionary:auto-sync] syncing current anime snapshot');
|
deps.logInfo?.('[dictionary:auto-sync] syncing current anime snapshot');
|
||||||
const snapshot = await deps.getOrCreateCurrentSnapshot();
|
const snapshot = await deps.getOrCreateCurrentSnapshot(undefined, {
|
||||||
|
onChecking: ({ mediaId, mediaTitle }) => {
|
||||||
|
currentMediaId = mediaId;
|
||||||
|
currentMediaTitle = mediaTitle;
|
||||||
|
deps.onSyncStatus?.({
|
||||||
|
phase: 'checking',
|
||||||
|
mediaId,
|
||||||
|
mediaTitle,
|
||||||
|
message: buildCheckingMessage(mediaTitle),
|
||||||
|
});
|
||||||
|
},
|
||||||
|
onGenerating: ({ mediaId, mediaTitle }) => {
|
||||||
|
currentMediaId = mediaId;
|
||||||
|
currentMediaTitle = mediaTitle;
|
||||||
|
deps.onSyncStatus?.({
|
||||||
|
phase: 'generating',
|
||||||
|
mediaId,
|
||||||
|
mediaTitle,
|
||||||
|
message: buildGeneratingMessage(mediaTitle),
|
||||||
|
});
|
||||||
|
},
|
||||||
|
});
|
||||||
|
currentMediaId = snapshot.mediaId;
|
||||||
|
currentMediaTitle = snapshot.mediaTitle;
|
||||||
|
deps.onSyncStatus?.({
|
||||||
|
phase: 'syncing',
|
||||||
|
mediaId: snapshot.mediaId,
|
||||||
|
mediaTitle: snapshot.mediaTitle,
|
||||||
|
message: buildSyncingMessage(snapshot.mediaTitle),
|
||||||
|
});
|
||||||
const state = readAutoSyncState(statePath);
|
const state = readAutoSyncState(statePath);
|
||||||
const nextActiveMediaIds = [
|
const nextActiveMediaIds = [
|
||||||
snapshot.mediaId,
|
snapshot.mediaId,
|
||||||
@@ -152,6 +231,12 @@ export function createCharacterDictionaryAutoSyncRuntimeService(
|
|||||||
!state.mergedDictionaryTitle ||
|
!state.mergedDictionaryTitle ||
|
||||||
!snapshot.fromCache
|
!snapshot.fromCache
|
||||||
) {
|
) {
|
||||||
|
deps.onSyncStatus?.({
|
||||||
|
phase: 'building',
|
||||||
|
mediaId: snapshot.mediaId,
|
||||||
|
mediaTitle: snapshot.mediaTitle,
|
||||||
|
message: buildBuildingMessage(snapshot.mediaTitle),
|
||||||
|
});
|
||||||
deps.logInfo?.('[dictionary:auto-sync] rebuilding merged dictionary for active anime set');
|
deps.logInfo?.('[dictionary:auto-sync] rebuilding merged dictionary for active anime set');
|
||||||
merged = await deps.buildMergedDictionary(nextActiveMediaIds);
|
merged = await deps.buildMergedDictionary(nextActiveMediaIds);
|
||||||
}
|
}
|
||||||
@@ -162,6 +247,8 @@ export function createCharacterDictionaryAutoSyncRuntimeService(
|
|||||||
throw new Error('Merged character dictionary state is incomplete.');
|
throw new Error('Merged character dictionary state is incomplete.');
|
||||||
}
|
}
|
||||||
|
|
||||||
|
await deps.waitForYomitanMutationReady?.();
|
||||||
|
|
||||||
const dictionaryInfo = await withOperationTimeout(
|
const dictionaryInfo = await withOperationTimeout(
|
||||||
'getYomitanDictionaryInfo',
|
'getYomitanDictionaryInfo',
|
||||||
deps.getYomitanDictionaryInfo(),
|
deps.getYomitanDictionaryInfo(),
|
||||||
@@ -176,8 +263,15 @@ export function createCharacterDictionaryAutoSyncRuntimeService(
|
|||||||
existing === null ||
|
existing === null ||
|
||||||
existingRevision === null ||
|
existingRevision === null ||
|
||||||
existingRevision !== revision;
|
existingRevision !== revision;
|
||||||
|
let changed = merged !== null;
|
||||||
|
|
||||||
if (shouldImport) {
|
if (shouldImport) {
|
||||||
|
deps.onSyncStatus?.({
|
||||||
|
phase: 'importing',
|
||||||
|
mediaId: snapshot.mediaId,
|
||||||
|
mediaTitle: snapshot.mediaTitle,
|
||||||
|
message: buildImportingMessage(snapshot.mediaTitle),
|
||||||
|
});
|
||||||
if (existing !== null) {
|
if (existing !== null) {
|
||||||
await withOperationTimeout(
|
await withOperationTimeout(
|
||||||
`deleteYomitanDictionary(${dictionaryTitle})`,
|
`deleteYomitanDictionary(${dictionaryTitle})`,
|
||||||
@@ -195,13 +289,15 @@ export function createCharacterDictionaryAutoSyncRuntimeService(
|
|||||||
if (!imported) {
|
if (!imported) {
|
||||||
throw new Error(`Failed to import dictionary ZIP: ${merged.zipPath}`);
|
throw new Error(`Failed to import dictionary ZIP: ${merged.zipPath}`);
|
||||||
}
|
}
|
||||||
|
changed = true;
|
||||||
}
|
}
|
||||||
|
|
||||||
deps.logInfo?.(`[dictionary:auto-sync] applying Yomitan settings for ${dictionaryTitle}`);
|
deps.logInfo?.(`[dictionary:auto-sync] applying Yomitan settings for ${dictionaryTitle}`);
|
||||||
await withOperationTimeout(
|
const settingsUpdated = await withOperationTimeout(
|
||||||
`upsertYomitanDictionarySettings(${dictionaryTitle})`,
|
`upsertYomitanDictionarySettings(${dictionaryTitle})`,
|
||||||
deps.upsertYomitanDictionarySettings(dictionaryTitle, config.profileScope),
|
deps.upsertYomitanDictionarySettings(dictionaryTitle, config.profileScope),
|
||||||
);
|
);
|
||||||
|
changed = changed || settingsUpdated === true;
|
||||||
|
|
||||||
writeAutoSyncState(statePath, {
|
writeAutoSyncState(statePath, {
|
||||||
activeMediaIds: nextActiveMediaIds,
|
activeMediaIds: nextActiveMediaIds,
|
||||||
@@ -211,6 +307,28 @@ export function createCharacterDictionaryAutoSyncRuntimeService(
|
|||||||
deps.logInfo?.(
|
deps.logInfo?.(
|
||||||
`[dictionary:auto-sync] synced AniList ${snapshot.mediaId}: ${dictionaryTitle} (${snapshot.entryCount} entries)`,
|
`[dictionary:auto-sync] synced AniList ${snapshot.mediaId}: ${dictionaryTitle} (${snapshot.entryCount} entries)`,
|
||||||
);
|
);
|
||||||
|
deps.onSyncStatus?.({
|
||||||
|
phase: 'ready',
|
||||||
|
mediaId: snapshot.mediaId,
|
||||||
|
mediaTitle: snapshot.mediaTitle,
|
||||||
|
message: buildReadyMessage(snapshot.mediaTitle),
|
||||||
|
changed,
|
||||||
|
});
|
||||||
|
deps.onSyncComplete?.({
|
||||||
|
mediaId: snapshot.mediaId,
|
||||||
|
mediaTitle: snapshot.mediaTitle,
|
||||||
|
changed,
|
||||||
|
});
|
||||||
|
} catch (error) {
|
||||||
|
const errorMessage = (error as Error)?.message ?? String(error);
|
||||||
|
deps.onSyncStatus?.({
|
||||||
|
phase: 'failed',
|
||||||
|
mediaId: currentMediaId,
|
||||||
|
mediaTitle: currentMediaTitle ?? undefined,
|
||||||
|
message: buildFailedMessage(currentMediaTitle, errorMessage),
|
||||||
|
});
|
||||||
|
throw error;
|
||||||
|
}
|
||||||
};
|
};
|
||||||
|
|
||||||
const enqueueSync = (): void => {
|
const enqueueSync = (): void => {
|
||||||
|
|||||||
@@ -25,7 +25,12 @@ test('createConfigHotReloadAppliedHandler runs all hot-reload effects', () => {
|
|||||||
|
|
||||||
applyHotReload(
|
applyHotReload(
|
||||||
{
|
{
|
||||||
hotReloadFields: ['shortcuts', 'secondarySub.defaultMode', 'ankiConnect.ai'],
|
hotReloadFields: [
|
||||||
|
'shortcuts',
|
||||||
|
'secondarySub.defaultMode',
|
||||||
|
'ankiConnect.ai',
|
||||||
|
'subtitleStyle.autoPauseVideoOnHover',
|
||||||
|
],
|
||||||
restartRequiredFields: [],
|
restartRequiredFields: [],
|
||||||
},
|
},
|
||||||
config,
|
config,
|
||||||
|
|||||||
42
src/main/runtime/current-media-tokenization-gate.test.ts
Normal file
42
src/main/runtime/current-media-tokenization-gate.test.ts
Normal file
@@ -0,0 +1,42 @@
|
|||||||
|
import assert from 'node:assert/strict';
|
||||||
|
import test from 'node:test';
|
||||||
|
import { createCurrentMediaTokenizationGate } from './current-media-tokenization-gate';
|
||||||
|
|
||||||
|
test('current media tokenization gate waits until current path is marked ready', async () => {
|
||||||
|
const gate = createCurrentMediaTokenizationGate();
|
||||||
|
gate.updateCurrentMediaPath('/tmp/video-1.mkv');
|
||||||
|
|
||||||
|
let resolved = false;
|
||||||
|
const waitPromise = gate.waitUntilReady('/tmp/video-1.mkv').then(() => {
|
||||||
|
resolved = true;
|
||||||
|
});
|
||||||
|
|
||||||
|
await Promise.resolve();
|
||||||
|
assert.equal(resolved, false);
|
||||||
|
|
||||||
|
gate.markReady('/tmp/video-1.mkv');
|
||||||
|
await waitPromise;
|
||||||
|
assert.equal(resolved, true);
|
||||||
|
});
|
||||||
|
|
||||||
|
test('current media tokenization gate resolves old waiters when media changes', async () => {
|
||||||
|
const gate = createCurrentMediaTokenizationGate();
|
||||||
|
gate.updateCurrentMediaPath('/tmp/video-1.mkv');
|
||||||
|
|
||||||
|
let resolved = false;
|
||||||
|
const waitPromise = gate.waitUntilReady('/tmp/video-1.mkv').then(() => {
|
||||||
|
resolved = true;
|
||||||
|
});
|
||||||
|
|
||||||
|
gate.updateCurrentMediaPath('/tmp/video-2.mkv');
|
||||||
|
await waitPromise;
|
||||||
|
assert.equal(resolved, true);
|
||||||
|
});
|
||||||
|
|
||||||
|
test('current media tokenization gate returns immediately for ready media', async () => {
|
||||||
|
const gate = createCurrentMediaTokenizationGate();
|
||||||
|
gate.updateCurrentMediaPath('/tmp/video-1.mkv');
|
||||||
|
gate.markReady('/tmp/video-1.mkv');
|
||||||
|
|
||||||
|
await gate.waitUntilReady('/tmp/video-1.mkv');
|
||||||
|
});
|
||||||
70
src/main/runtime/current-media-tokenization-gate.ts
Normal file
70
src/main/runtime/current-media-tokenization-gate.ts
Normal file
@@ -0,0 +1,70 @@
|
|||||||
|
function normalizeMediaPath(mediaPath: string | null | undefined): string | null {
|
||||||
|
if (typeof mediaPath !== 'string') {
|
||||||
|
return null;
|
||||||
|
}
|
||||||
|
const trimmed = mediaPath.trim();
|
||||||
|
return trimmed.length > 0 ? trimmed : null;
|
||||||
|
}
|
||||||
|
|
||||||
|
export function createCurrentMediaTokenizationGate(): {
|
||||||
|
updateCurrentMediaPath: (mediaPath: string | null | undefined) => void;
|
||||||
|
markReady: (mediaPath: string | null | undefined) => void;
|
||||||
|
waitUntilReady: (mediaPath: string | null | undefined) => Promise<void>;
|
||||||
|
} {
|
||||||
|
let currentMediaPath: string | null = null;
|
||||||
|
let readyMediaPath: string | null = null;
|
||||||
|
let pendingMediaPath: string | null = null;
|
||||||
|
let pendingPromise: Promise<void> | null = null;
|
||||||
|
let resolvePending: (() => void) | null = null;
|
||||||
|
|
||||||
|
const resolvePendingWaiter = (): void => {
|
||||||
|
resolvePending?.();
|
||||||
|
resolvePending = null;
|
||||||
|
pendingPromise = null;
|
||||||
|
pendingMediaPath = null;
|
||||||
|
};
|
||||||
|
|
||||||
|
const ensurePendingPromise = (mediaPath: string): Promise<void> => {
|
||||||
|
if (pendingMediaPath === mediaPath && pendingPromise) {
|
||||||
|
return pendingPromise;
|
||||||
|
}
|
||||||
|
resolvePendingWaiter();
|
||||||
|
pendingMediaPath = mediaPath;
|
||||||
|
pendingPromise = new Promise<void>((resolve) => {
|
||||||
|
resolvePending = resolve;
|
||||||
|
});
|
||||||
|
return pendingPromise;
|
||||||
|
};
|
||||||
|
|
||||||
|
return {
|
||||||
|
updateCurrentMediaPath: (mediaPath) => {
|
||||||
|
const normalizedPath = normalizeMediaPath(mediaPath);
|
||||||
|
if (normalizedPath === currentMediaPath) {
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
currentMediaPath = normalizedPath;
|
||||||
|
readyMediaPath = null;
|
||||||
|
resolvePendingWaiter();
|
||||||
|
if (normalizedPath) {
|
||||||
|
ensurePendingPromise(normalizedPath);
|
||||||
|
}
|
||||||
|
},
|
||||||
|
markReady: (mediaPath) => {
|
||||||
|
const normalizedPath = normalizeMediaPath(mediaPath);
|
||||||
|
if (!normalizedPath) {
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
readyMediaPath = normalizedPath;
|
||||||
|
if (pendingMediaPath === normalizedPath) {
|
||||||
|
resolvePendingWaiter();
|
||||||
|
}
|
||||||
|
},
|
||||||
|
waitUntilReady: async (mediaPath) => {
|
||||||
|
const normalizedPath = normalizeMediaPath(mediaPath) ?? currentMediaPath;
|
||||||
|
if (!normalizedPath || readyMediaPath === normalizedPath) {
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
await ensurePendingPromise(normalizedPath);
|
||||||
|
},
|
||||||
|
};
|
||||||
|
}
|
||||||
@@ -16,12 +16,14 @@ test('overlay window factory main deps builders return mapped handlers', () => {
|
|||||||
setOverlayDebugVisualizationEnabled: (enabled) => calls.push(`debug:${enabled}`),
|
setOverlayDebugVisualizationEnabled: (enabled) => calls.push(`debug:${enabled}`),
|
||||||
isOverlayVisible: (kind) => kind === 'visible',
|
isOverlayVisible: (kind) => kind === 'visible',
|
||||||
tryHandleOverlayShortcutLocalFallback: () => false,
|
tryHandleOverlayShortcutLocalFallback: () => false,
|
||||||
|
forwardTabToMpv: () => calls.push('forward-tab'),
|
||||||
onWindowClosed: (kind) => calls.push(`closed:${kind}`),
|
onWindowClosed: (kind) => calls.push(`closed:${kind}`),
|
||||||
});
|
});
|
||||||
|
|
||||||
const overlayDeps = buildOverlayDeps();
|
const overlayDeps = buildOverlayDeps();
|
||||||
assert.equal(overlayDeps.isDev, true);
|
assert.equal(overlayDeps.isDev, true);
|
||||||
assert.equal(overlayDeps.isOverlayVisible('visible'), true);
|
assert.equal(overlayDeps.isOverlayVisible('visible'), true);
|
||||||
|
overlayDeps.forwardTabToMpv();
|
||||||
|
|
||||||
const buildMainDeps = createBuildCreateMainWindowMainDepsHandler({
|
const buildMainDeps = createBuildCreateMainWindowMainDepsHandler({
|
||||||
createOverlayWindow: () => ({ id: 'visible' }),
|
createOverlayWindow: () => ({ id: 'visible' }),
|
||||||
@@ -37,5 +39,5 @@ test('overlay window factory main deps builders return mapped handlers', () => {
|
|||||||
const modalDeps = buildModalDeps();
|
const modalDeps = buildModalDeps();
|
||||||
modalDeps.setModalWindow(null);
|
modalDeps.setModalWindow(null);
|
||||||
|
|
||||||
assert.deepEqual(calls, ['set-main', 'set-modal']);
|
assert.deepEqual(calls, ['forward-tab', 'set-main', 'set-modal']);
|
||||||
});
|
});
|
||||||
|
|||||||
@@ -8,6 +8,7 @@ export function createBuildCreateOverlayWindowMainDepsHandler<TWindow>(deps: {
|
|||||||
setOverlayDebugVisualizationEnabled: (enabled: boolean) => void;
|
setOverlayDebugVisualizationEnabled: (enabled: boolean) => void;
|
||||||
isOverlayVisible: (windowKind: 'visible' | 'modal') => boolean;
|
isOverlayVisible: (windowKind: 'visible' | 'modal') => boolean;
|
||||||
tryHandleOverlayShortcutLocalFallback: (input: Electron.Input) => boolean;
|
tryHandleOverlayShortcutLocalFallback: (input: Electron.Input) => boolean;
|
||||||
|
forwardTabToMpv: () => void;
|
||||||
onWindowClosed: (windowKind: 'visible' | 'modal') => void;
|
onWindowClosed: (windowKind: 'visible' | 'modal') => void;
|
||||||
},
|
},
|
||||||
) => TWindow;
|
) => TWindow;
|
||||||
@@ -17,6 +18,7 @@ export function createBuildCreateOverlayWindowMainDepsHandler<TWindow>(deps: {
|
|||||||
setOverlayDebugVisualizationEnabled: (enabled: boolean) => void;
|
setOverlayDebugVisualizationEnabled: (enabled: boolean) => void;
|
||||||
isOverlayVisible: (windowKind: 'visible' | 'modal') => boolean;
|
isOverlayVisible: (windowKind: 'visible' | 'modal') => boolean;
|
||||||
tryHandleOverlayShortcutLocalFallback: (input: Electron.Input) => boolean;
|
tryHandleOverlayShortcutLocalFallback: (input: Electron.Input) => boolean;
|
||||||
|
forwardTabToMpv: () => void;
|
||||||
onWindowClosed: (windowKind: 'visible' | 'modal') => void;
|
onWindowClosed: (windowKind: 'visible' | 'modal') => void;
|
||||||
}) {
|
}) {
|
||||||
return () => ({
|
return () => ({
|
||||||
@@ -27,6 +29,7 @@ export function createBuildCreateOverlayWindowMainDepsHandler<TWindow>(deps: {
|
|||||||
setOverlayDebugVisualizationEnabled: deps.setOverlayDebugVisualizationEnabled,
|
setOverlayDebugVisualizationEnabled: deps.setOverlayDebugVisualizationEnabled,
|
||||||
isOverlayVisible: deps.isOverlayVisible,
|
isOverlayVisible: deps.isOverlayVisible,
|
||||||
tryHandleOverlayShortcutLocalFallback: deps.tryHandleOverlayShortcutLocalFallback,
|
tryHandleOverlayShortcutLocalFallback: deps.tryHandleOverlayShortcutLocalFallback,
|
||||||
|
forwardTabToMpv: deps.forwardTabToMpv,
|
||||||
onWindowClosed: deps.onWindowClosed,
|
onWindowClosed: deps.onWindowClosed,
|
||||||
});
|
});
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -15,6 +15,7 @@ test('create overlay window handler forwards options and kind', () => {
|
|||||||
assert.equal(options.isDev, true);
|
assert.equal(options.isDev, true);
|
||||||
assert.equal(options.isOverlayVisible('visible'), true);
|
assert.equal(options.isOverlayVisible('visible'), true);
|
||||||
assert.equal(options.isOverlayVisible('modal'), false);
|
assert.equal(options.isOverlayVisible('modal'), false);
|
||||||
|
options.forwardTabToMpv();
|
||||||
options.onRuntimeOptionsChanged();
|
options.onRuntimeOptionsChanged();
|
||||||
options.setOverlayDebugVisualizationEnabled(true);
|
options.setOverlayDebugVisualizationEnabled(true);
|
||||||
options.onWindowClosed(kind);
|
options.onWindowClosed(kind);
|
||||||
@@ -26,11 +27,18 @@ test('create overlay window handler forwards options and kind', () => {
|
|||||||
setOverlayDebugVisualizationEnabled: (enabled) => calls.push(`debug:${enabled}`),
|
setOverlayDebugVisualizationEnabled: (enabled) => calls.push(`debug:${enabled}`),
|
||||||
isOverlayVisible: (kind) => kind === 'visible',
|
isOverlayVisible: (kind) => kind === 'visible',
|
||||||
tryHandleOverlayShortcutLocalFallback: () => false,
|
tryHandleOverlayShortcutLocalFallback: () => false,
|
||||||
|
forwardTabToMpv: () => calls.push('forward-tab'),
|
||||||
onWindowClosed: (kind) => calls.push(`closed:${kind}`),
|
onWindowClosed: (kind) => calls.push(`closed:${kind}`),
|
||||||
});
|
});
|
||||||
|
|
||||||
assert.equal(createOverlayWindow('visible'), window);
|
assert.equal(createOverlayWindow('visible'), window);
|
||||||
assert.deepEqual(calls, ['kind:visible', 'runtime-options', 'debug:true', 'closed:visible']);
|
assert.deepEqual(calls, [
|
||||||
|
'kind:visible',
|
||||||
|
'forward-tab',
|
||||||
|
'runtime-options',
|
||||||
|
'debug:true',
|
||||||
|
'closed:visible',
|
||||||
|
]);
|
||||||
});
|
});
|
||||||
|
|
||||||
test('create main window handler stores visible window', () => {
|
test('create main window handler stores visible window', () => {
|
||||||
|
|||||||
@@ -10,6 +10,7 @@ export function createCreateOverlayWindowHandler<TWindow>(deps: {
|
|||||||
setOverlayDebugVisualizationEnabled: (enabled: boolean) => void;
|
setOverlayDebugVisualizationEnabled: (enabled: boolean) => void;
|
||||||
isOverlayVisible: (windowKind: OverlayWindowKind) => boolean;
|
isOverlayVisible: (windowKind: OverlayWindowKind) => boolean;
|
||||||
tryHandleOverlayShortcutLocalFallback: (input: Electron.Input) => boolean;
|
tryHandleOverlayShortcutLocalFallback: (input: Electron.Input) => boolean;
|
||||||
|
forwardTabToMpv: () => void;
|
||||||
onWindowClosed: (windowKind: OverlayWindowKind) => void;
|
onWindowClosed: (windowKind: OverlayWindowKind) => void;
|
||||||
},
|
},
|
||||||
) => TWindow;
|
) => TWindow;
|
||||||
@@ -19,6 +20,7 @@ export function createCreateOverlayWindowHandler<TWindow>(deps: {
|
|||||||
setOverlayDebugVisualizationEnabled: (enabled: boolean) => void;
|
setOverlayDebugVisualizationEnabled: (enabled: boolean) => void;
|
||||||
isOverlayVisible: (windowKind: OverlayWindowKind) => boolean;
|
isOverlayVisible: (windowKind: OverlayWindowKind) => boolean;
|
||||||
tryHandleOverlayShortcutLocalFallback: (input: Electron.Input) => boolean;
|
tryHandleOverlayShortcutLocalFallback: (input: Electron.Input) => boolean;
|
||||||
|
forwardTabToMpv: () => void;
|
||||||
onWindowClosed: (windowKind: OverlayWindowKind) => void;
|
onWindowClosed: (windowKind: OverlayWindowKind) => void;
|
||||||
}) {
|
}) {
|
||||||
return (kind: OverlayWindowKind): TWindow => {
|
return (kind: OverlayWindowKind): TWindow => {
|
||||||
@@ -29,6 +31,7 @@ export function createCreateOverlayWindowHandler<TWindow>(deps: {
|
|||||||
setOverlayDebugVisualizationEnabled: deps.setOverlayDebugVisualizationEnabled,
|
setOverlayDebugVisualizationEnabled: deps.setOverlayDebugVisualizationEnabled,
|
||||||
isOverlayVisible: deps.isOverlayVisible,
|
isOverlayVisible: deps.isOverlayVisible,
|
||||||
tryHandleOverlayShortcutLocalFallback: deps.tryHandleOverlayShortcutLocalFallback,
|
tryHandleOverlayShortcutLocalFallback: deps.tryHandleOverlayShortcutLocalFallback,
|
||||||
|
forwardTabToMpv: deps.forwardTabToMpv,
|
||||||
onWindowClosed: deps.onWindowClosed,
|
onWindowClosed: deps.onWindowClosed,
|
||||||
});
|
});
|
||||||
};
|
};
|
||||||
|
|||||||
@@ -19,6 +19,7 @@ test('overlay window runtime handlers compose create/main/modal handlers', () =>
|
|||||||
},
|
},
|
||||||
isOverlayVisible: (kind) => kind === 'visible',
|
isOverlayVisible: (kind) => kind === 'visible',
|
||||||
tryHandleOverlayShortcutLocalFallback: () => false,
|
tryHandleOverlayShortcutLocalFallback: () => false,
|
||||||
|
forwardTabToMpv: () => calls.push('forward-tab'),
|
||||||
onWindowClosed: (kind) => calls.push(`closed:${kind}`),
|
onWindowClosed: (kind) => calls.push(`closed:${kind}`),
|
||||||
},
|
},
|
||||||
setMainWindow: (window) => {
|
setMainWindow: (window) => {
|
||||||
|
|||||||
159
src/main/runtime/startup-osd-sequencer.test.ts
Normal file
159
src/main/runtime/startup-osd-sequencer.test.ts
Normal file
@@ -0,0 +1,159 @@
|
|||||||
|
import assert from 'node:assert/strict';
|
||||||
|
import test from 'node:test';
|
||||||
|
import {
|
||||||
|
createStartupOsdSequencer,
|
||||||
|
type StartupOsdSequencerCharacterDictionaryEvent,
|
||||||
|
} from './startup-osd-sequencer';
|
||||||
|
|
||||||
|
function makeDictionaryEvent(
|
||||||
|
phase: StartupOsdSequencerCharacterDictionaryEvent['phase'],
|
||||||
|
message: string,
|
||||||
|
): StartupOsdSequencerCharacterDictionaryEvent {
|
||||||
|
return {
|
||||||
|
phase,
|
||||||
|
message,
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
test('startup OSD keeps dictionary progress hidden until tokenization and annotation loading finish', () => {
|
||||||
|
const osdMessages: string[] = [];
|
||||||
|
const sequencer = createStartupOsdSequencer({
|
||||||
|
showOsd: (message) => {
|
||||||
|
osdMessages.push(message);
|
||||||
|
},
|
||||||
|
});
|
||||||
|
|
||||||
|
sequencer.notifyCharacterDictionaryStatus(
|
||||||
|
makeDictionaryEvent('syncing', 'Updating character dictionary for Frieren...'),
|
||||||
|
);
|
||||||
|
sequencer.showAnnotationLoading('Loading subtitle annotations |');
|
||||||
|
sequencer.markTokenizationReady();
|
||||||
|
|
||||||
|
assert.deepEqual(osdMessages, ['Loading subtitle annotations |']);
|
||||||
|
|
||||||
|
sequencer.showAnnotationLoading('Loading subtitle annotations /');
|
||||||
|
assert.deepEqual(osdMessages, [
|
||||||
|
'Loading subtitle annotations |',
|
||||||
|
'Loading subtitle annotations /',
|
||||||
|
]);
|
||||||
|
|
||||||
|
sequencer.markAnnotationLoadingComplete('Subtitle annotations loaded');
|
||||||
|
assert.deepEqual(osdMessages, [
|
||||||
|
'Loading subtitle annotations |',
|
||||||
|
'Loading subtitle annotations /',
|
||||||
|
'Updating character dictionary for Frieren...',
|
||||||
|
]);
|
||||||
|
});
|
||||||
|
|
||||||
|
test('startup OSD buffers checking behind annotations and replaces it with later generating progress', () => {
|
||||||
|
const osdMessages: string[] = [];
|
||||||
|
const sequencer = createStartupOsdSequencer({
|
||||||
|
showOsd: (message) => {
|
||||||
|
osdMessages.push(message);
|
||||||
|
},
|
||||||
|
});
|
||||||
|
|
||||||
|
sequencer.notifyCharacterDictionaryStatus(
|
||||||
|
makeDictionaryEvent('checking', 'Checking character dictionary for Frieren...'),
|
||||||
|
);
|
||||||
|
sequencer.showAnnotationLoading('Loading subtitle annotations |');
|
||||||
|
sequencer.markTokenizationReady();
|
||||||
|
sequencer.notifyCharacterDictionaryStatus(
|
||||||
|
makeDictionaryEvent('generating', 'Generating character dictionary for Frieren...'),
|
||||||
|
);
|
||||||
|
|
||||||
|
assert.deepEqual(osdMessages, ['Loading subtitle annotations |']);
|
||||||
|
|
||||||
|
sequencer.markAnnotationLoadingComplete('Subtitle annotations loaded');
|
||||||
|
|
||||||
|
assert.deepEqual(osdMessages, [
|
||||||
|
'Loading subtitle annotations |',
|
||||||
|
'Generating character dictionary for Frieren...',
|
||||||
|
]);
|
||||||
|
});
|
||||||
|
|
||||||
|
test('startup OSD replaces earlier dictionary progress with later building progress', () => {
|
||||||
|
const osdMessages: string[] = [];
|
||||||
|
const sequencer = createStartupOsdSequencer({
|
||||||
|
showOsd: (message) => {
|
||||||
|
osdMessages.push(message);
|
||||||
|
},
|
||||||
|
});
|
||||||
|
|
||||||
|
sequencer.notifyCharacterDictionaryStatus(
|
||||||
|
makeDictionaryEvent('syncing', 'Updating character dictionary for Frieren...'),
|
||||||
|
);
|
||||||
|
sequencer.showAnnotationLoading('Loading subtitle annotations |');
|
||||||
|
sequencer.markTokenizationReady();
|
||||||
|
sequencer.notifyCharacterDictionaryStatus(
|
||||||
|
makeDictionaryEvent('building', 'Building character dictionary for Frieren...'),
|
||||||
|
);
|
||||||
|
|
||||||
|
sequencer.markAnnotationLoadingComplete('Subtitle annotations loaded');
|
||||||
|
|
||||||
|
assert.deepEqual(osdMessages, [
|
||||||
|
'Loading subtitle annotations |',
|
||||||
|
'Building character dictionary for Frieren...',
|
||||||
|
]);
|
||||||
|
});
|
||||||
|
|
||||||
|
test('startup OSD skips buffered dictionary ready messages when progress completed before it became visible', () => {
|
||||||
|
const osdMessages: string[] = [];
|
||||||
|
const sequencer = createStartupOsdSequencer({
|
||||||
|
showOsd: (message) => {
|
||||||
|
osdMessages.push(message);
|
||||||
|
},
|
||||||
|
});
|
||||||
|
|
||||||
|
sequencer.notifyCharacterDictionaryStatus(
|
||||||
|
makeDictionaryEvent('syncing', 'Updating character dictionary for Frieren...'),
|
||||||
|
);
|
||||||
|
sequencer.notifyCharacterDictionaryStatus(
|
||||||
|
makeDictionaryEvent('ready', 'Character dictionary ready for Frieren'),
|
||||||
|
);
|
||||||
|
sequencer.markTokenizationReady();
|
||||||
|
sequencer.markAnnotationLoadingComplete('Subtitle annotations loaded');
|
||||||
|
|
||||||
|
assert.deepEqual(osdMessages, ['Subtitle annotations loaded']);
|
||||||
|
});
|
||||||
|
|
||||||
|
test('startup OSD shows dictionary failure after annotation loading completes', () => {
|
||||||
|
const osdMessages: string[] = [];
|
||||||
|
const sequencer = createStartupOsdSequencer({
|
||||||
|
showOsd: (message) => {
|
||||||
|
osdMessages.push(message);
|
||||||
|
},
|
||||||
|
});
|
||||||
|
|
||||||
|
sequencer.showAnnotationLoading('Loading subtitle annotations |');
|
||||||
|
sequencer.notifyCharacterDictionaryStatus(
|
||||||
|
makeDictionaryEvent('failed', 'Character dictionary sync failed for Frieren: boom'),
|
||||||
|
);
|
||||||
|
sequencer.markTokenizationReady();
|
||||||
|
sequencer.markAnnotationLoadingComplete('Subtitle annotations loaded');
|
||||||
|
|
||||||
|
assert.deepEqual(osdMessages, [
|
||||||
|
'Loading subtitle annotations |',
|
||||||
|
'Character dictionary sync failed for Frieren: boom',
|
||||||
|
]);
|
||||||
|
});
|
||||||
|
|
||||||
|
test('startup OSD reset requires the next media to wait for tokenization again', () => {
|
||||||
|
const osdMessages: string[] = [];
|
||||||
|
const sequencer = createStartupOsdSequencer({
|
||||||
|
showOsd: (message) => {
|
||||||
|
osdMessages.push(message);
|
||||||
|
},
|
||||||
|
});
|
||||||
|
|
||||||
|
sequencer.markTokenizationReady();
|
||||||
|
sequencer.reset();
|
||||||
|
sequencer.notifyCharacterDictionaryStatus(
|
||||||
|
makeDictionaryEvent('syncing', 'Updating character dictionary for Frieren...'),
|
||||||
|
);
|
||||||
|
|
||||||
|
assert.deepEqual(osdMessages, []);
|
||||||
|
|
||||||
|
sequencer.markTokenizationReady();
|
||||||
|
assert.deepEqual(osdMessages, ['Updating character dictionary for Frieren...']);
|
||||||
|
});
|
||||||
107
src/main/runtime/startup-osd-sequencer.ts
Normal file
107
src/main/runtime/startup-osd-sequencer.ts
Normal file
@@ -0,0 +1,107 @@
|
|||||||
|
export interface StartupOsdSequencerCharacterDictionaryEvent {
|
||||||
|
phase: 'checking' | 'generating' | 'syncing' | 'building' | 'importing' | 'ready' | 'failed';
|
||||||
|
message: string;
|
||||||
|
}
|
||||||
|
|
||||||
|
export function createStartupOsdSequencer(deps: { showOsd: (message: string) => void }): {
|
||||||
|
reset: () => void;
|
||||||
|
markTokenizationReady: () => void;
|
||||||
|
showAnnotationLoading: (message: string) => void;
|
||||||
|
markAnnotationLoadingComplete: (message: string) => void;
|
||||||
|
notifyCharacterDictionaryStatus: (event: StartupOsdSequencerCharacterDictionaryEvent) => void;
|
||||||
|
} {
|
||||||
|
let tokenizationReady = false;
|
||||||
|
let annotationLoadingMessage: string | null = null;
|
||||||
|
let pendingDictionaryProgress: StartupOsdSequencerCharacterDictionaryEvent | null = null;
|
||||||
|
let pendingDictionaryFailure: StartupOsdSequencerCharacterDictionaryEvent | null = null;
|
||||||
|
let dictionaryProgressShown = false;
|
||||||
|
|
||||||
|
const canShowDictionaryStatus = (): boolean =>
|
||||||
|
tokenizationReady && annotationLoadingMessage === null;
|
||||||
|
|
||||||
|
const flushBufferedDictionaryStatus = (): boolean => {
|
||||||
|
if (!canShowDictionaryStatus()) {
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
if (pendingDictionaryProgress) {
|
||||||
|
deps.showOsd(pendingDictionaryProgress.message);
|
||||||
|
dictionaryProgressShown = true;
|
||||||
|
return true;
|
||||||
|
}
|
||||||
|
if (pendingDictionaryFailure) {
|
||||||
|
deps.showOsd(pendingDictionaryFailure.message);
|
||||||
|
pendingDictionaryFailure = null;
|
||||||
|
dictionaryProgressShown = false;
|
||||||
|
return true;
|
||||||
|
}
|
||||||
|
return false;
|
||||||
|
};
|
||||||
|
|
||||||
|
return {
|
||||||
|
reset: () => {
|
||||||
|
tokenizationReady = false;
|
||||||
|
annotationLoadingMessage = null;
|
||||||
|
pendingDictionaryProgress = null;
|
||||||
|
pendingDictionaryFailure = null;
|
||||||
|
dictionaryProgressShown = false;
|
||||||
|
},
|
||||||
|
markTokenizationReady: () => {
|
||||||
|
tokenizationReady = true;
|
||||||
|
if (annotationLoadingMessage !== null) {
|
||||||
|
deps.showOsd(annotationLoadingMessage);
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
flushBufferedDictionaryStatus();
|
||||||
|
},
|
||||||
|
showAnnotationLoading: (message) => {
|
||||||
|
annotationLoadingMessage = message;
|
||||||
|
if (tokenizationReady) {
|
||||||
|
deps.showOsd(message);
|
||||||
|
}
|
||||||
|
},
|
||||||
|
markAnnotationLoadingComplete: (message) => {
|
||||||
|
annotationLoadingMessage = null;
|
||||||
|
if (!tokenizationReady) {
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
if (flushBufferedDictionaryStatus()) {
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
deps.showOsd(message);
|
||||||
|
},
|
||||||
|
notifyCharacterDictionaryStatus: (event) => {
|
||||||
|
if (
|
||||||
|
event.phase === 'checking' ||
|
||||||
|
event.phase === 'generating' ||
|
||||||
|
event.phase === 'syncing' ||
|
||||||
|
event.phase === 'building' ||
|
||||||
|
event.phase === 'importing'
|
||||||
|
) {
|
||||||
|
pendingDictionaryProgress = event;
|
||||||
|
pendingDictionaryFailure = null;
|
||||||
|
if (canShowDictionaryStatus()) {
|
||||||
|
deps.showOsd(event.message);
|
||||||
|
dictionaryProgressShown = true;
|
||||||
|
}
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
pendingDictionaryProgress = null;
|
||||||
|
if (event.phase === 'failed') {
|
||||||
|
if (canShowDictionaryStatus()) {
|
||||||
|
deps.showOsd(event.message);
|
||||||
|
} else {
|
||||||
|
pendingDictionaryFailure = event;
|
||||||
|
}
|
||||||
|
dictionaryProgressShown = false;
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
pendingDictionaryFailure = null;
|
||||||
|
if (canShowDictionaryStatus() && dictionaryProgressShown) {
|
||||||
|
deps.showOsd(event.message);
|
||||||
|
}
|
||||||
|
dictionaryProgressShown = false;
|
||||||
|
},
|
||||||
|
};
|
||||||
|
}
|
||||||
@@ -80,6 +80,8 @@ export function createPrewarmSubtitleDictionariesMainHandler(deps: {
|
|||||||
ensureJlptDictionaryLookup: () => Promise<void>;
|
ensureJlptDictionaryLookup: () => Promise<void>;
|
||||||
ensureFrequencyDictionaryLookup: () => Promise<void>;
|
ensureFrequencyDictionaryLookup: () => Promise<void>;
|
||||||
showMpvOsd?: (message: string) => void;
|
showMpvOsd?: (message: string) => void;
|
||||||
|
showLoadingOsd?: (message: string) => void;
|
||||||
|
showLoadedOsd?: (message: string) => void;
|
||||||
shouldShowOsdNotification?: () => boolean;
|
shouldShowOsdNotification?: () => boolean;
|
||||||
setInterval?: (callback: () => void, delayMs: number) => unknown;
|
setInterval?: (callback: () => void, delayMs: number) => unknown;
|
||||||
clearInterval?: (timer: unknown) => void;
|
clearInterval?: (timer: unknown) => void;
|
||||||
@@ -90,6 +92,8 @@ export function createPrewarmSubtitleDictionariesMainHandler(deps: {
|
|||||||
let loadingOsdFrame = 0;
|
let loadingOsdFrame = 0;
|
||||||
let loadingOsdTimer: unknown = null;
|
let loadingOsdTimer: unknown = null;
|
||||||
const showMpvOsd = deps.showMpvOsd;
|
const showMpvOsd = deps.showMpvOsd;
|
||||||
|
const showLoadingOsd = deps.showLoadingOsd ?? showMpvOsd;
|
||||||
|
const showLoadedOsd = deps.showLoadedOsd ?? showMpvOsd;
|
||||||
const setIntervalHandler =
|
const setIntervalHandler =
|
||||||
deps.setInterval ??
|
deps.setInterval ??
|
||||||
((callback: () => void, delayMs: number): unknown => setInterval(callback, delayMs));
|
((callback: () => void, delayMs: number): unknown => setInterval(callback, delayMs));
|
||||||
@@ -99,7 +103,7 @@ export function createPrewarmSubtitleDictionariesMainHandler(deps: {
|
|||||||
const spinnerFrames = ['|', '/', '-', '\\'];
|
const spinnerFrames = ['|', '/', '-', '\\'];
|
||||||
|
|
||||||
const beginLoadingOsd = (): boolean => {
|
const beginLoadingOsd = (): boolean => {
|
||||||
if (!showMpvOsd) {
|
if (!showLoadingOsd) {
|
||||||
return false;
|
return false;
|
||||||
}
|
}
|
||||||
loadingOsdDepth += 1;
|
loadingOsdDepth += 1;
|
||||||
@@ -108,13 +112,13 @@ export function createPrewarmSubtitleDictionariesMainHandler(deps: {
|
|||||||
}
|
}
|
||||||
|
|
||||||
loadingOsdFrame = 0;
|
loadingOsdFrame = 0;
|
||||||
showMpvOsd(`Loading subtitle annotations ${spinnerFrames[loadingOsdFrame]}`);
|
showLoadingOsd(`Loading subtitle annotations ${spinnerFrames[loadingOsdFrame]}`);
|
||||||
loadingOsdFrame += 1;
|
loadingOsdFrame += 1;
|
||||||
loadingOsdTimer = setIntervalHandler(() => {
|
loadingOsdTimer = setIntervalHandler(() => {
|
||||||
if (!showMpvOsd) {
|
if (!showLoadingOsd) {
|
||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
showMpvOsd(
|
showLoadingOsd(
|
||||||
`Loading subtitle annotations ${spinnerFrames[loadingOsdFrame % spinnerFrames.length]}`,
|
`Loading subtitle annotations ${spinnerFrames[loadingOsdFrame % spinnerFrames.length]}`,
|
||||||
);
|
);
|
||||||
loadingOsdFrame += 1;
|
loadingOsdFrame += 1;
|
||||||
@@ -123,7 +127,7 @@ export function createPrewarmSubtitleDictionariesMainHandler(deps: {
|
|||||||
};
|
};
|
||||||
|
|
||||||
const endLoadingOsd = (): void => {
|
const endLoadingOsd = (): void => {
|
||||||
if (!showMpvOsd) {
|
if (!showLoadedOsd) {
|
||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -136,7 +140,7 @@ export function createPrewarmSubtitleDictionariesMainHandler(deps: {
|
|||||||
clearIntervalHandler(loadingOsdTimer);
|
clearIntervalHandler(loadingOsdTimer);
|
||||||
loadingOsdTimer = null;
|
loadingOsdTimer = null;
|
||||||
}
|
}
|
||||||
showMpvOsd('Subtitle annotations loaded');
|
showLoadedOsd('Subtitle annotations loaded');
|
||||||
};
|
};
|
||||||
|
|
||||||
return async (options?: { showLoadingOsd?: boolean }): Promise<void> => {
|
return async (options?: { showLoadingOsd?: boolean }): Promise<void> => {
|
||||||
|
|||||||
@@ -7,11 +7,19 @@ const releaseWorkflowPath = resolve(__dirname, '../.github/workflows/release.yml
|
|||||||
const releaseWorkflow = readFileSync(releaseWorkflowPath, 'utf8');
|
const releaseWorkflow = readFileSync(releaseWorkflowPath, 'utf8');
|
||||||
const makefilePath = resolve(__dirname, '../Makefile');
|
const makefilePath = resolve(__dirname, '../Makefile');
|
||||||
const makefile = readFileSync(makefilePath, 'utf8');
|
const makefile = readFileSync(makefilePath, 'utf8');
|
||||||
|
const packageJsonPath = resolve(__dirname, '../package.json');
|
||||||
|
const packageJson = JSON.parse(readFileSync(packageJsonPath, 'utf8')) as {
|
||||||
|
scripts: Record<string, string>;
|
||||||
|
};
|
||||||
|
|
||||||
test('publish release leaves prerelease unset so gh creates a normal release', () => {
|
test('publish release leaves prerelease unset so gh creates a normal release', () => {
|
||||||
assert.ok(!releaseWorkflow.includes('--prerelease'));
|
assert.ok(!releaseWorkflow.includes('--prerelease'));
|
||||||
});
|
});
|
||||||
|
|
||||||
|
test('publish release forces an existing draft tag release to become public', () => {
|
||||||
|
assert.ok(releaseWorkflow.includes('--draft=false'));
|
||||||
|
});
|
||||||
|
|
||||||
test('release workflow verifies a committed changelog section before publish', () => {
|
test('release workflow verifies a committed changelog section before publish', () => {
|
||||||
assert.match(releaseWorkflow, /bun run changelog:check/);
|
assert.match(releaseWorkflow, /bun run changelog:check/);
|
||||||
});
|
});
|
||||||
@@ -26,6 +34,22 @@ test('release workflow includes the Windows installer in checksums and uploaded
|
|||||||
assert.match(releaseWorkflow, /artifacts=\([\s\S]*release\/\*\.exe[\s\S]*release\/SHA256SUMS\.txt[\s\S]*\)/);
|
assert.match(releaseWorkflow, /artifacts=\([\s\S]*release\/\*\.exe[\s\S]*release\/SHA256SUMS\.txt[\s\S]*\)/);
|
||||||
});
|
});
|
||||||
|
|
||||||
|
test('release package scripts disable implicit electron-builder publishing', () => {
|
||||||
|
assert.match(packageJson.scripts['build:appimage'] ?? '', /--publish never/);
|
||||||
|
assert.match(packageJson.scripts['build:mac'] ?? '', /--publish never/);
|
||||||
|
assert.match(packageJson.scripts['build:win'] ?? '', /--publish never/);
|
||||||
|
assert.match(packageJson.scripts['build:win:unsigned'] ?? '', /build-win-unsigned\.mjs/);
|
||||||
|
});
|
||||||
|
|
||||||
|
test('windows release workflow publishes unsigned artifacts directly without SignPath', () => {
|
||||||
|
assert.match(releaseWorkflow, /Build unsigned Windows artifacts/);
|
||||||
|
assert.match(releaseWorkflow, /run: bun run build:win:unsigned/);
|
||||||
|
assert.match(releaseWorkflow, /name: windows/);
|
||||||
|
assert.match(releaseWorkflow, /path: \|\n\s+release\/\*\.exe\n\s+release\/\*\.zip/);
|
||||||
|
assert.ok(!releaseWorkflow.includes('signpath/github-action-submit-signing-request'));
|
||||||
|
assert.ok(!releaseWorkflow.includes('SIGNPATH_'));
|
||||||
|
});
|
||||||
|
|
||||||
test('Makefile routes Windows install-plugin setup through bun and documents Windows builds', () => {
|
test('Makefile routes Windows install-plugin setup through bun and documents Windows builds', () => {
|
||||||
assert.match(makefile, /windows\) printf '%s\\n' "\[INFO\] Windows builds run via: bun run build:win" ;;/);
|
assert.match(makefile, /windows\) printf '%s\\n' "\[INFO\] Windows builds run via: bun run build:win" ;;/);
|
||||||
assert.match(makefile, /bun \.\/scripts\/configure-plugin-binary-path\.mjs/);
|
assert.match(makefile, /bun \.\/scripts\/configure-plugin-binary-path\.mjs/);
|
||||||
|
|||||||
@@ -38,6 +38,7 @@ function createContext(subtitleHeight: number) {
|
|||||||
state: {
|
state: {
|
||||||
currentYPercent: null,
|
currentYPercent: null,
|
||||||
persistedSubtitlePosition: { yPercent: 10 },
|
persistedSubtitlePosition: { yPercent: 10 },
|
||||||
|
isOverSubtitle: false,
|
||||||
},
|
},
|
||||||
};
|
};
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -84,6 +84,19 @@ function getNextPersistedPosition(
|
|||||||
};
|
};
|
||||||
}
|
}
|
||||||
|
|
||||||
|
function applyMarginBottom(ctx: RendererContext, yPercent: number): void {
|
||||||
|
const clampedPercent = clampYPercent(ctx, yPercent);
|
||||||
|
ctx.state.currentYPercent = clampedPercent;
|
||||||
|
const marginBottom = (clampedPercent / 100) * getViewportHeight();
|
||||||
|
|
||||||
|
ctx.dom.subtitleContainer.style.position = '';
|
||||||
|
ctx.dom.subtitleContainer.style.left = '';
|
||||||
|
ctx.dom.subtitleContainer.style.top = '';
|
||||||
|
ctx.dom.subtitleContainer.style.right = '';
|
||||||
|
ctx.dom.subtitleContainer.style.transform = '';
|
||||||
|
ctx.dom.subtitleContainer.style.marginBottom = `${marginBottom}px`;
|
||||||
|
}
|
||||||
|
|
||||||
export function createInMemorySubtitlePositionController(
|
export function createInMemorySubtitlePositionController(
|
||||||
ctx: RendererContext,
|
ctx: RendererContext,
|
||||||
): SubtitlePositionController {
|
): SubtitlePositionController {
|
||||||
@@ -98,16 +111,7 @@ export function createInMemorySubtitlePositionController(
|
|||||||
}
|
}
|
||||||
|
|
||||||
function applyYPercent(yPercent: number): void {
|
function applyYPercent(yPercent: number): void {
|
||||||
const clampedPercent = clampYPercent(ctx, yPercent);
|
applyMarginBottom(ctx, yPercent);
|
||||||
ctx.state.currentYPercent = clampedPercent;
|
|
||||||
const marginBottom = (clampedPercent / 100) * getViewportHeight();
|
|
||||||
|
|
||||||
ctx.dom.subtitleContainer.style.position = '';
|
|
||||||
ctx.dom.subtitleContainer.style.left = '';
|
|
||||||
ctx.dom.subtitleContainer.style.top = '';
|
|
||||||
ctx.dom.subtitleContainer.style.right = '';
|
|
||||||
ctx.dom.subtitleContainer.style.transform = '';
|
|
||||||
ctx.dom.subtitleContainer.style.marginBottom = `${marginBottom}px`;
|
|
||||||
}
|
}
|
||||||
|
|
||||||
function persistSubtitlePositionPatch(patch: Partial<SubtitlePosition>): void {
|
function persistSubtitlePositionPatch(patch: Partial<SubtitlePosition>): void {
|
||||||
|
|||||||
@@ -374,7 +374,8 @@ async function init(): Promise<void> {
|
|||||||
|
|
||||||
await keyboardHandlers.setupMpvInputForwarding();
|
await keyboardHandlers.setupMpvInputForwarding();
|
||||||
|
|
||||||
subtitleRenderer.applySubtitleStyle(await window.electronAPI.getSubtitleStyle());
|
const initialSubtitleStyle = await window.electronAPI.getSubtitleStyle();
|
||||||
|
subtitleRenderer.applySubtitleStyle(initialSubtitleStyle);
|
||||||
|
|
||||||
positioning.applyStoredSubtitlePosition(
|
positioning.applyStoredSubtitlePosition(
|
||||||
await window.electronAPI.getSubtitlePosition(),
|
await window.electronAPI.getSubtitlePosition(),
|
||||||
|
|||||||
108
src/window-trackers/hyprland-tracker.test.ts
Normal file
108
src/window-trackers/hyprland-tracker.test.ts
Normal file
@@ -0,0 +1,108 @@
|
|||||||
|
import test from 'node:test';
|
||||||
|
import assert from 'node:assert/strict';
|
||||||
|
import {
|
||||||
|
parseHyprctlClients,
|
||||||
|
selectHyprlandMpvWindow,
|
||||||
|
type HyprlandClient,
|
||||||
|
} from './hyprland-tracker';
|
||||||
|
|
||||||
|
function makeClient(overrides: Partial<HyprlandClient> = {}): HyprlandClient {
|
||||||
|
return {
|
||||||
|
address: '0x1',
|
||||||
|
class: 'mpv',
|
||||||
|
initialClass: 'mpv',
|
||||||
|
at: [0, 0],
|
||||||
|
size: [1280, 720],
|
||||||
|
mapped: true,
|
||||||
|
hidden: false,
|
||||||
|
...overrides,
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
test('selectHyprlandMpvWindow ignores hidden and unmapped mpv clients', () => {
|
||||||
|
const selected = selectHyprlandMpvWindow(
|
||||||
|
[
|
||||||
|
makeClient({
|
||||||
|
address: '0xhidden',
|
||||||
|
hidden: true,
|
||||||
|
}),
|
||||||
|
makeClient({
|
||||||
|
address: '0xunmapped',
|
||||||
|
mapped: false,
|
||||||
|
}),
|
||||||
|
makeClient({
|
||||||
|
address: '0xvisible',
|
||||||
|
at: [100, 200],
|
||||||
|
size: [1920, 1080],
|
||||||
|
}),
|
||||||
|
],
|
||||||
|
{
|
||||||
|
targetMpvSocketPath: null,
|
||||||
|
activeWindowAddress: null,
|
||||||
|
getWindowCommandLine: () => null,
|
||||||
|
},
|
||||||
|
);
|
||||||
|
|
||||||
|
assert.equal(selected?.address, '0xvisible');
|
||||||
|
});
|
||||||
|
|
||||||
|
test('selectHyprlandMpvWindow prefers active visible window among socket matches', () => {
|
||||||
|
const commandLines = new Map<string, string>([
|
||||||
|
['10', 'mpv --input-ipc-server=/tmp/subminer.sock first.mkv'],
|
||||||
|
['20', 'mpv --input-ipc-server=/tmp/subminer.sock second.mkv'],
|
||||||
|
]);
|
||||||
|
|
||||||
|
const selected = selectHyprlandMpvWindow(
|
||||||
|
[
|
||||||
|
makeClient({
|
||||||
|
address: '0xfirst',
|
||||||
|
pid: 10,
|
||||||
|
}),
|
||||||
|
makeClient({
|
||||||
|
address: '0xsecond',
|
||||||
|
pid: 20,
|
||||||
|
}),
|
||||||
|
],
|
||||||
|
{
|
||||||
|
targetMpvSocketPath: '/tmp/subminer.sock',
|
||||||
|
activeWindowAddress: '0xsecond',
|
||||||
|
getWindowCommandLine: (pid) => commandLines.get(String(pid)) ?? null,
|
||||||
|
},
|
||||||
|
);
|
||||||
|
|
||||||
|
assert.equal(selected?.address, '0xsecond');
|
||||||
|
});
|
||||||
|
|
||||||
|
test('selectHyprlandMpvWindow matches mpv by initialClass when class is blank', () => {
|
||||||
|
const selected = selectHyprlandMpvWindow(
|
||||||
|
[
|
||||||
|
makeClient({
|
||||||
|
address: '0xinitial',
|
||||||
|
class: '',
|
||||||
|
initialClass: 'mpv',
|
||||||
|
}),
|
||||||
|
],
|
||||||
|
{
|
||||||
|
targetMpvSocketPath: null,
|
||||||
|
activeWindowAddress: null,
|
||||||
|
getWindowCommandLine: () => null,
|
||||||
|
},
|
||||||
|
);
|
||||||
|
|
||||||
|
assert.equal(selected?.address, '0xinitial');
|
||||||
|
});
|
||||||
|
|
||||||
|
test('parseHyprctlClients tolerates non-json prefix output', () => {
|
||||||
|
const clients = parseHyprctlClients(`ok
|
||||||
|
[{"address":"0x1","class":"mpv","initialClass":"mpv","at":[1,2],"size":[3,4]}]`);
|
||||||
|
|
||||||
|
assert.deepEqual(clients, [
|
||||||
|
{
|
||||||
|
address: '0x1',
|
||||||
|
class: 'mpv',
|
||||||
|
initialClass: 'mpv',
|
||||||
|
at: [1, 2],
|
||||||
|
size: [3, 4],
|
||||||
|
},
|
||||||
|
]);
|
||||||
|
});
|
||||||
@@ -23,17 +23,120 @@ import { createLogger } from '../logger';
|
|||||||
|
|
||||||
const log = createLogger('tracker').child('hyprland');
|
const log = createLogger('tracker').child('hyprland');
|
||||||
|
|
||||||
interface HyprlandClient {
|
export interface HyprlandClient {
|
||||||
|
address?: string;
|
||||||
class: string;
|
class: string;
|
||||||
|
initialClass?: string;
|
||||||
at: [number, number];
|
at: [number, number];
|
||||||
size: [number, number];
|
size: [number, number];
|
||||||
pid?: number;
|
pid?: number;
|
||||||
|
mapped?: boolean;
|
||||||
|
hidden?: boolean;
|
||||||
|
}
|
||||||
|
|
||||||
|
interface SelectHyprlandMpvWindowOptions {
|
||||||
|
targetMpvSocketPath: string | null;
|
||||||
|
activeWindowAddress: string | null;
|
||||||
|
getWindowCommandLine: (pid: number) => string | null;
|
||||||
|
}
|
||||||
|
|
||||||
|
function extractHyprctlJsonPayload(output: string): string | null {
|
||||||
|
const trimmed = output.trim();
|
||||||
|
if (!trimmed) {
|
||||||
|
return null;
|
||||||
|
}
|
||||||
|
|
||||||
|
const arrayStart = trimmed.indexOf('[');
|
||||||
|
const objectStart = trimmed.indexOf('{');
|
||||||
|
const startCandidates = [arrayStart, objectStart].filter((index) => index >= 0);
|
||||||
|
if (startCandidates.length === 0) {
|
||||||
|
return null;
|
||||||
|
}
|
||||||
|
|
||||||
|
const startIndex = Math.min(...startCandidates);
|
||||||
|
return trimmed.slice(startIndex);
|
||||||
|
}
|
||||||
|
|
||||||
|
function matchesTargetSocket(commandLine: string, targetMpvSocketPath: string): boolean {
|
||||||
|
return (
|
||||||
|
commandLine.includes(`--input-ipc-server=${targetMpvSocketPath}`) ||
|
||||||
|
commandLine.includes(`--input-ipc-server ${targetMpvSocketPath}`)
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
function preferActiveHyprlandWindow(
|
||||||
|
clients: HyprlandClient[],
|
||||||
|
activeWindowAddress: string | null,
|
||||||
|
): HyprlandClient | null {
|
||||||
|
if (activeWindowAddress) {
|
||||||
|
const activeClient = clients.find((client) => client.address === activeWindowAddress);
|
||||||
|
if (activeClient) {
|
||||||
|
return activeClient;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return clients[0] ?? null;
|
||||||
|
}
|
||||||
|
|
||||||
|
function isMpvClassName(value: string | undefined): boolean {
|
||||||
|
if (!value) {
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
|
||||||
|
return value.trim().toLowerCase().includes('mpv');
|
||||||
|
}
|
||||||
|
|
||||||
|
export function selectHyprlandMpvWindow(
|
||||||
|
clients: HyprlandClient[],
|
||||||
|
options: SelectHyprlandMpvWindowOptions,
|
||||||
|
): HyprlandClient | null {
|
||||||
|
const visibleMpvWindows = clients.filter(
|
||||||
|
(client) =>
|
||||||
|
(isMpvClassName(client.class) || isMpvClassName(client.initialClass)) &&
|
||||||
|
client.mapped !== false &&
|
||||||
|
client.hidden !== true,
|
||||||
|
);
|
||||||
|
|
||||||
|
if (!options.targetMpvSocketPath) {
|
||||||
|
return preferActiveHyprlandWindow(visibleMpvWindows, options.activeWindowAddress);
|
||||||
|
}
|
||||||
|
const targetMpvSocketPath = options.targetMpvSocketPath;
|
||||||
|
|
||||||
|
const matchingWindows = visibleMpvWindows.filter((client) => {
|
||||||
|
if (!client.pid) {
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
|
||||||
|
const commandLine = options.getWindowCommandLine(client.pid);
|
||||||
|
if (!commandLine) {
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
|
||||||
|
return matchesTargetSocket(commandLine, targetMpvSocketPath);
|
||||||
|
});
|
||||||
|
|
||||||
|
return preferActiveHyprlandWindow(matchingWindows, options.activeWindowAddress);
|
||||||
|
}
|
||||||
|
|
||||||
|
export function parseHyprctlClients(output: string): HyprlandClient[] | null {
|
||||||
|
const jsonPayload = extractHyprctlJsonPayload(output);
|
||||||
|
if (!jsonPayload) {
|
||||||
|
return null;
|
||||||
|
}
|
||||||
|
|
||||||
|
const parsed = JSON.parse(jsonPayload) as unknown;
|
||||||
|
if (!Array.isArray(parsed)) {
|
||||||
|
return null;
|
||||||
|
}
|
||||||
|
|
||||||
|
return parsed as HyprlandClient[];
|
||||||
}
|
}
|
||||||
|
|
||||||
export class HyprlandWindowTracker extends BaseWindowTracker {
|
export class HyprlandWindowTracker extends BaseWindowTracker {
|
||||||
private pollInterval: ReturnType<typeof setInterval> | null = null;
|
private pollInterval: ReturnType<typeof setInterval> | null = null;
|
||||||
private eventSocket: net.Socket | null = null;
|
private eventSocket: net.Socket | null = null;
|
||||||
private readonly targetMpvSocketPath: string | null;
|
private readonly targetMpvSocketPath: string | null;
|
||||||
|
private activeWindowAddress: string | null = null;
|
||||||
|
|
||||||
constructor(targetMpvSocketPath?: string) {
|
constructor(targetMpvSocketPath?: string) {
|
||||||
super();
|
super();
|
||||||
@@ -75,15 +178,7 @@ export class HyprlandWindowTracker extends BaseWindowTracker {
|
|||||||
this.eventSocket.on('data', (data: Buffer) => {
|
this.eventSocket.on('data', (data: Buffer) => {
|
||||||
const events = data.toString().split('\n');
|
const events = data.toString().split('\n');
|
||||||
for (const event of events) {
|
for (const event of events) {
|
||||||
if (
|
this.handleSocketEvent(event);
|
||||||
event.includes('movewindow') ||
|
|
||||||
event.includes('windowtitle') ||
|
|
||||||
event.includes('openwindow') ||
|
|
||||||
event.includes('closewindow') ||
|
|
||||||
event.includes('fullscreen')
|
|
||||||
) {
|
|
||||||
this.pollGeometry();
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
});
|
});
|
||||||
|
|
||||||
@@ -98,10 +193,47 @@ export class HyprlandWindowTracker extends BaseWindowTracker {
|
|||||||
this.eventSocket.connect(socketPath);
|
this.eventSocket.connect(socketPath);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
private handleSocketEvent(event: string): void {
|
||||||
|
const trimmedEvent = event.trim();
|
||||||
|
if (!trimmedEvent) {
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
const [name, rawData = ''] = trimmedEvent.split('>>', 2);
|
||||||
|
const data = rawData.trim();
|
||||||
|
|
||||||
|
if (name === 'activewindowv2') {
|
||||||
|
this.activeWindowAddress = data || null;
|
||||||
|
this.pollGeometry();
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
if (name === 'closewindow' && data === this.activeWindowAddress) {
|
||||||
|
this.activeWindowAddress = null;
|
||||||
|
}
|
||||||
|
|
||||||
|
if (
|
||||||
|
name === 'movewindow' ||
|
||||||
|
name === 'movewindowv2' ||
|
||||||
|
name === 'windowtitle' ||
|
||||||
|
name === 'windowtitlev2' ||
|
||||||
|
name === 'openwindow' ||
|
||||||
|
name === 'closewindow' ||
|
||||||
|
name === 'fullscreen' ||
|
||||||
|
name === 'changefloatingmode'
|
||||||
|
) {
|
||||||
|
this.pollGeometry();
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
private pollGeometry(): void {
|
private pollGeometry(): void {
|
||||||
try {
|
try {
|
||||||
const output = execSync('hyprctl clients -j', { encoding: 'utf-8' });
|
const output = execSync('hyprctl -j clients', { encoding: 'utf-8' });
|
||||||
const clients: HyprlandClient[] = JSON.parse(output);
|
const clients = parseHyprctlClients(output);
|
||||||
|
if (!clients) {
|
||||||
|
this.updateGeometry(null);
|
||||||
|
return;
|
||||||
|
}
|
||||||
const mpvWindow = this.findTargetWindow(clients);
|
const mpvWindow = this.findTargetWindow(clients);
|
||||||
|
|
||||||
if (mpvWindow) {
|
if (mpvWindow) {
|
||||||
@@ -120,30 +252,11 @@ export class HyprlandWindowTracker extends BaseWindowTracker {
|
|||||||
}
|
}
|
||||||
|
|
||||||
private findTargetWindow(clients: HyprlandClient[]): HyprlandClient | null {
|
private findTargetWindow(clients: HyprlandClient[]): HyprlandClient | null {
|
||||||
const mpvWindows = clients.filter((client) => client.class === 'mpv');
|
return selectHyprlandMpvWindow(clients, {
|
||||||
if (!this.targetMpvSocketPath) {
|
targetMpvSocketPath: this.targetMpvSocketPath,
|
||||||
return mpvWindows[0] || null;
|
activeWindowAddress: this.activeWindowAddress,
|
||||||
}
|
getWindowCommandLine: (pid) => this.getWindowCommandLine(pid),
|
||||||
|
});
|
||||||
for (const mpvWindow of mpvWindows) {
|
|
||||||
if (!mpvWindow.pid) {
|
|
||||||
continue;
|
|
||||||
}
|
|
||||||
|
|
||||||
const commandLine = this.getWindowCommandLine(mpvWindow.pid);
|
|
||||||
if (!commandLine) {
|
|
||||||
continue;
|
|
||||||
}
|
|
||||||
|
|
||||||
if (
|
|
||||||
commandLine.includes(`--input-ipc-server=${this.targetMpvSocketPath}`) ||
|
|
||||||
commandLine.includes(`--input-ipc-server ${this.targetMpvSocketPath}`)
|
|
||||||
) {
|
|
||||||
return mpvWindow;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
return null;
|
|
||||||
}
|
}
|
||||||
|
|
||||||
private getWindowCommandLine(pid: number): string | null {
|
private getWindowCommandLine(pid: number): string | null {
|
||||||
|
|||||||
@@ -18,11 +18,51 @@ Height: 720
|
|||||||
});
|
});
|
||||||
});
|
});
|
||||||
|
|
||||||
|
test('parseX11WindowGeometry preserves negative coordinates', () => {
|
||||||
|
const geometry = parseX11WindowGeometry(`
|
||||||
|
Absolute upper-left X: -1920
|
||||||
|
Absolute upper-left Y: -24
|
||||||
|
Width: 1920
|
||||||
|
Height: 1080
|
||||||
|
`);
|
||||||
|
assert.deepEqual(geometry, {
|
||||||
|
x: -1920,
|
||||||
|
y: -24,
|
||||||
|
width: 1920,
|
||||||
|
height: 1080,
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
test('parseX11WindowPid parses xprop output', () => {
|
test('parseX11WindowPid parses xprop output', () => {
|
||||||
assert.equal(parseX11WindowPid('_NET_WM_PID(CARDINAL) = 4242'), 4242);
|
assert.equal(parseX11WindowPid('_NET_WM_PID(CARDINAL) = 4242'), 4242);
|
||||||
assert.equal(parseX11WindowPid('_NET_WM_PID(CARDINAL) = not-a-number'), null);
|
assert.equal(parseX11WindowPid('_NET_WM_PID(CARDINAL) = not-a-number'), null);
|
||||||
});
|
});
|
||||||
|
|
||||||
|
test('X11WindowTracker searches only visible mpv windows', async () => {
|
||||||
|
const commands: Array<{ command: string; args: string[] }> = [];
|
||||||
|
const tracker = new X11WindowTracker(undefined, async (command, args) => {
|
||||||
|
commands.push({ command, args });
|
||||||
|
if (command === 'xdotool') {
|
||||||
|
return '123';
|
||||||
|
}
|
||||||
|
if (command === 'xwininfo') {
|
||||||
|
return `Absolute upper-left X: 0
|
||||||
|
Absolute upper-left Y: 0
|
||||||
|
Width: 640
|
||||||
|
Height: 360`;
|
||||||
|
}
|
||||||
|
return '';
|
||||||
|
});
|
||||||
|
|
||||||
|
(tracker as unknown as { pollGeometry: () => void }).pollGeometry();
|
||||||
|
await new Promise((resolve) => setTimeout(resolve, 0));
|
||||||
|
|
||||||
|
assert.deepEqual(commands[0], {
|
||||||
|
command: 'xdotool',
|
||||||
|
args: ['search', '--onlyvisible', '--class', 'mpv'],
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
test('X11WindowTracker skips overlapping polls while one command is in flight', async () => {
|
test('X11WindowTracker skips overlapping polls while one command is in flight', async () => {
|
||||||
let commandCalls = 0;
|
let commandCalls = 0;
|
||||||
let release: (() => void) | undefined;
|
let release: (() => void) | undefined;
|
||||||
|
|||||||
@@ -39,8 +39,8 @@ export function parseX11WindowGeometry(winInfo: string): {
|
|||||||
width: number;
|
width: number;
|
||||||
height: number;
|
height: number;
|
||||||
} | null {
|
} | null {
|
||||||
const xMatch = winInfo.match(/Absolute upper-left X:\s*(\d+)/);
|
const xMatch = winInfo.match(/Absolute upper-left X:\s*(-?\d+)/);
|
||||||
const yMatch = winInfo.match(/Absolute upper-left Y:\s*(\d+)/);
|
const yMatch = winInfo.match(/Absolute upper-left Y:\s*(-?\d+)/);
|
||||||
const widthMatch = winInfo.match(/Width:\s*(\d+)/);
|
const widthMatch = winInfo.match(/Width:\s*(\d+)/);
|
||||||
const heightMatch = winInfo.match(/Height:\s*(\d+)/);
|
const heightMatch = winInfo.match(/Height:\s*(\d+)/);
|
||||||
if (!xMatch || !yMatch || !widthMatch || !heightMatch) {
|
if (!xMatch || !yMatch || !widthMatch || !heightMatch) {
|
||||||
@@ -112,7 +112,12 @@ export class X11WindowTracker extends BaseWindowTracker {
|
|||||||
}
|
}
|
||||||
|
|
||||||
private async pollGeometryAsync(): Promise<void> {
|
private async pollGeometryAsync(): Promise<void> {
|
||||||
const windowIdsOutput = await this.runCommand('xdotool', ['search', '--class', 'mpv']);
|
const windowIdsOutput = await this.runCommand('xdotool', [
|
||||||
|
'search',
|
||||||
|
'--onlyvisible',
|
||||||
|
'--class',
|
||||||
|
'mpv',
|
||||||
|
]);
|
||||||
const windowIds = windowIdsOutput.trim();
|
const windowIds = windowIdsOutput.trim();
|
||||||
if (!windowIds) {
|
if (!windowIds) {
|
||||||
this.updateGeometry(null);
|
this.updateGeometry(null);
|
||||||
|
|||||||
Reference in New Issue
Block a user