Fix subtitle-cycle OSD labels and harden quality gates

- Resolve J/Shift+J subtitle-cycle OSD text via track-list labels and property expansion
- Add mpv proxy OSD runtime + regression coverage across IPC/main/runtime paths
- Scope `make pretty` to maintained source files and add scoped Prettier script
- Align release workflow with cache/install order and explicit TypeScript check
- Clean up duplicate submodule entry and remove checked-in docs/plans artifacts
This commit is contained in:
2026-03-07 16:27:40 -08:00
parent 6ae1afd12e
commit 6bd886be47
59 changed files with 772 additions and 929 deletions

View File

@@ -31,9 +31,6 @@ jobs:
with:
node-version: 22.12.0
- name: Install dependencies
run: bun install --frozen-lockfile
- name: Cache dependencies
uses: actions/cache@v4
with:
@@ -48,6 +45,9 @@ jobs:
- name: Install dependencies
run: bun install --frozen-lockfile
- name: Build (TypeScript check)
run: bun run typecheck
- name: Test suite (source)
run: bun run test:fast

1
.gitignore vendored
View File

@@ -37,3 +37,4 @@ tests/*
.worktrees/
.codex/*
.agents/*
docs/*

3
.gitmodules vendored
View File

@@ -5,9 +5,6 @@
[submodule "vendor/yomitan-jlpt-vocab"]
path = vendor/yomitan-jlpt-vocab
url = https://github.com/stephenmk/yomitan-jlpt-vocab
[submodule "yomitan-jlpt-vocab"]
path = vendor/yomitan-jlpt-vocab
url = https://github.com/stephenmk/yomitan-jlpt-vocab
[submodule "vendor/subminer-yomitan"]
path = vendor/subminer-yomitan
url = https://github.com/ksyasuda/subminer-yomitan

View File

@@ -98,7 +98,7 @@ ensure-bun:
@command -v bun >/dev/null 2>&1 || { printf '%s\n' "[ERROR] bun not found"; exit 1; }
pretty: ensure-bun
@bun run format
@bun run format:src
build:
@printf '%s\n' "[INFO] Detected platform: $(PLATFORM)"

View File

@@ -4,15 +4,15 @@ title: Index AniList character alternative names in the character dictionary
status: Done
assignee: []
created_date: '2026-03-07 00:00'
updated_date: '2026-03-07 00:00'
updated_date: '2026-03-08 00:11'
labels:
- dictionary
- anilist
priority: high
dependencies: []
references:
- /home/sudacode/projects/japanese/SubMiner/src/main/character-dictionary-runtime.ts
- /home/sudacode/projects/japanese/SubMiner/src/main/character-dictionary-runtime.test.ts
- src/main/character-dictionary-runtime.ts
- src/main/character-dictionary-runtime.test.ts
---
## Description

View File

@@ -0,0 +1,71 @@
---
id: TASK-111
title: Fix subtitle-cycle OSD labels for J keybindings
status: Done
assignee:
- codex
created_date: '2026-03-07 23:45'
updated_date: '2026-03-08 00:06'
labels: []
dependencies: []
references:
- src/core/services/ipc-command.ts
- src/core/services/mpv.ts
- src/core/services/ipc-command.test.ts
- src/core/services/mpv-control.test.ts
---
## Description
<!-- SECTION:DESCRIPTION:BEGIN -->
When cycling subtitle tracks with the default J/Shift+J keybindings, the mpv OSD currently shows raw template text like `${sid}` instead of a resolved subtitle label. Update the keybinding OSD behavior so users see the active subtitle selection clearly when cycling tracks, and ensure placeholder-based OSD messages sent through the mpv client API render correctly.
<!-- SECTION:DESCRIPTION:END -->
## Acceptance Criteria
<!-- AC:BEGIN -->
- [x] #1 Pressing the primary subtitle cycle keybinding shows a resolved subtitle label on the OSD instead of a raw `${sid}` placeholder.
- [x] #2 Pressing the secondary subtitle cycle keybinding shows a resolved subtitle label on the OSD instead of a raw `${secondary-sid}` placeholder.
- [x] #3 Proxy OSD messages that rely on mpv property expansion render resolved values when sent through the mpv client API.
- [x] #4 Regression tests cover the subtitle-cycle OSD behavior and the placeholder-expansion OSD path.
<!-- AC:END -->
## Implementation Plan
<!-- SECTION:PLAN:BEGIN -->
1. Add focused failing tests for subtitle-cycle OSD labels and mpv placeholder-expansion behavior.
2. Update the IPC mpv command handler to resolve primary and secondary subtitle track labels from mpv `track-list` data after cycling subtitle tracks.
3. Update the mpv OSD runtime path so placeholder-based `show-text` messages sent through the client API opt into property expansion.
4. Run focused tests, then the relevant core test lane, and record results in the task notes.
<!-- SECTION:PLAN:END -->
## Implementation Notes
<!-- SECTION:NOTES:BEGIN -->
Initial triage: `ipc-command.ts` emits raw `${sid}`/`${secondary-sid}` placeholder strings, and `showMpvOsdRuntime` sends `show-text` via mpv client API without enabling property expansion.
User approved implementation plan on 2026-03-07.
Implementation: proxy mpv command OSD now supports an async resolver so subtitle track cycling can show human-readable labels instead of raw `${sid}` placeholders.
Implementation: `showMpvOsdRuntime` now prefixes placeholder-based messages with mpv client-api `expand-properties`, which fixes raw `${...}` OSD output for subtitle delay/position messages.
Testing: `bun test src/core/services/ipc-command.test.ts src/core/services/mpv-control.test.ts src/main/runtime/mpv-proxy-osd.test.ts src/main/runtime/ipc-mpv-command-main-deps.test.ts src/main/runtime/ipc-bridge-actions.test.ts src/main/runtime/ipc-bridge-actions-main-deps.test.ts src/main/runtime/composers/ipc-runtime-composer.test.ts` passed.
Testing: `bun x tsc --noEmit` passed.
Testing: `bun run test:core:src` passed (423 pass, 6 skip, 0 fail).
Docs: no update required because no checked-in docs or help text describe the J/Shift+J OSD output behavior.
<!-- SECTION:NOTES:END -->
## Final Summary
<!-- SECTION:FINAL_SUMMARY:BEGIN -->
Fixed subtitle-cycle OSD handling for the default J/Shift+J keybindings. The IPC mpv command path now supports resolving proxy OSD text asynchronously, and the main-runtime resolver reads mpv `track-list` state so primary and secondary subtitle cycling show human-readable track labels instead of raw `${sid}` / `${secondary-sid}` placeholders.
Also fixed the lower-level mpv OSD transport so placeholder-based `show-text` messages sent through the client API opt into `expand-properties`. That preserves existing template-based OSD messages like subtitle delay and subtitle position without leaking the raw `${...}` syntax.
Added regression coverage for the async proxy OSD path, the placeholder-expansion `showMpvOsdRuntime` path, and the runtime subtitle-track label resolver. Verification run: `bun x tsc --noEmit`; focused mpv/IPC tests; and the maintained `bun run test:core:src` lane (423 pass, 6 skip, 0 fail).
<!-- SECTION:FINAL_SUMMARY:END -->

View File

@@ -0,0 +1,61 @@
---
id: TASK-112
title: Address Claude review items on PR 15
status: Done
assignee:
- codex
created_date: '2026-03-08 00:11'
updated_date: '2026-03-08 00:12'
labels:
- pr-review
- ci
dependencies: []
references:
- .github/workflows/release.yml
- .github/workflows/ci.yml
- .gitmodules
- >-
backlog/tasks/task-101 -
Index-AniList-character-alternative-names-in-the-character-dictionary.md
priority: medium
---
## Description
<!-- SECTION:DESCRIPTION:BEGIN -->
Review Claude's PR feedback on PR #15, implement only the technically valid fixes on the current branch, and document which comments are non-actionable or already acceptable.
<!-- SECTION:DESCRIPTION:END -->
## Acceptance Criteria
<!-- AC:BEGIN -->
- [x] #1 Validated Claude's concrete PR review items against current branch state and repo conventions
- [x] #2 Implemented the accepted fixes with regression coverage or verification where applicable
- [x] #3 Documented which review items are non-blocking or intentionally left unchanged
<!-- AC:END -->
## Implementation Plan
<!-- SECTION:PLAN:BEGIN -->
1. Validate each Claude review item against current branch files and repo workflow.
2. Patch release quality-gate to match CI ordering and add explicit typecheck.
3. Remove duplicate .gitmodules stanza and normalize the TASK-101 reference path through Backlog MCP.
4. Run relevant verification for workflow/config metadata changes and record which review items remain non-actionable.
<!-- SECTION:PLAN:END -->
## Implementation Notes
<!-- SECTION:NOTES:BEGIN -->
User asked to address Claude PR comments on PR #15 and assess whether any action items remain. Treat review suggestions skeptically; only fix validated defects.
Validated Claude's five review items. Fixed release workflow ordering/typecheck, removed the duplicate .gitmodules entry, and normalized TASK-101 references to repo-relative paths via Backlog MCP.
Left the vendor/subminer-yomitan branch-pin suggestion unchanged. The committed submodule SHA already controls reproducibility; adding a branch would only affect update ergonomics and was not required to address a concrete defect.
<!-- SECTION:NOTES:END -->
## Final Summary
<!-- SECTION:FINAL_SUMMARY:BEGIN -->
Validated Claude's PR #15 review summary against the current branch and applied the actionable fixes. In `.github/workflows/release.yml`, the release `quality-gate` job now restores the dependency cache before installation, no longer installs twice, and runs `bun run typecheck` before the fast test suite to match CI expectations. In `.gitmodules`, removed the duplicate `vendor/yomitan-jlpt-vocab` stanza with the conflicting duplicate path. Through Backlog MCP, updated `TASK-101` references from an absolute local path to repo-relative paths so the task metadata is portable across contributors.
Verification: `git diff --check`, `git config -f .gitmodules --get-regexp '^submodule\..*\.path$'`, `bun run typecheck`, and `bun run test:fast` all passed. `bun run format:check` still fails on many pre-existing unrelated files already present on the branch, including multiple backlog task files and existing source/docs files; this review patch did not attempt a repo-wide formatting sweep.
<!-- SECTION:FINAL_SUMMARY:END -->

View File

@@ -0,0 +1,59 @@
---
id: TASK-113
title: Scope make pretty to maintained source files
status: Done
assignee:
- codex
created_date: '2026-03-08 00:20'
updated_date: '2026-03-08 00:22'
labels:
- tooling
- formatting
dependencies: []
references:
- Makefile
- package.json
priority: medium
---
## Description
<!-- SECTION:DESCRIPTION:BEGIN -->
Change the `make pretty` workflow so it formats only the maintained source/config files we intentionally keep under Prettier, instead of sweeping backlog/docs/generated content across the whole repository.
<!-- SECTION:DESCRIPTION:END -->
## Acceptance Criteria
<!-- AC:BEGIN -->
- [x] #1 `make pretty` formats only the approved maintained source/config paths
- [x] #2 The allowlist is reusable for check/write flows instead of duplicating path logic
- [x] #3 Verification shows the scoped formatting command targets the intended files without touching backlog or vendored content
<!-- AC:END -->
## Implementation Plan
<!-- SECTION:PLAN:BEGIN -->
1. Inspect current Prettier config/ignore behavior and keep the broad repo-wide format command unchanged.
2. Add a reusable scoped Prettier script that targets maintained source/config paths only.
3. Update `make pretty` to call the scoped script.
4. Verify the scoped command resolves only intended files and does not traverse backlog or vendor paths.
<!-- SECTION:PLAN:END -->
## Implementation Notes
<!-- SECTION:NOTES:BEGIN -->
User approved the allowlist approach: keep repo-wide `format` intact, make `make pretty` use a maintained-path formatter scope.
Added `scripts/prettier-scope.sh` as the single allowlist for scoped Prettier paths and wired `format:src` / `format:check:src` to it.
Updated `make pretty` to call `bun run format:src`. Verified with `make -n pretty` and shell tracing that the helper only targets the maintained allowlist and does not traverse `backlog/` or `vendor/`.
Excluded `Makefile` and `.prettierignore` from the allowlist after verification showed Prettier cannot infer parsers for them.
<!-- SECTION:NOTES:END -->
## Final Summary
<!-- SECTION:FINAL_SUMMARY:BEGIN -->
Scoped the repo's day-to-day formatting entrypoint without changing the existing broad repo-wide Prettier scripts. Added `scripts/prettier-scope.sh` as the shared allowlist for maintained source/config paths (`.github`, `build`, `launcher`, `scripts`, `src`, plus selected root JSON config files), added `format:src` and `format:check:src` in `package.json`, and updated `make pretty` to run the scoped formatter.
Verification: `make -n pretty` now resolves to `bun run format:src`. `bash -n scripts/prettier-scope.sh` passed, and shell-traced `bash -x scripts/prettier-scope.sh --check` confirmed the exact allowlist passed to Prettier. `bun run format:check:src` fails only because existing files inside the allowed source scope are not currently formatted; it no longer touches `backlog/` or `vendor/`.
<!-- SECTION:FINAL_SUMMARY:END -->

View File

@@ -1,28 +0,0 @@
# Anki Integration
read_when:
- changing `src/anki-integration.ts`
- changing Anki transport/config hot-reload behavior
- tracing note update, field grouping, or proxy ownership
## Ownership
- `src/anki-integration.ts`: thin facade; wires dependencies; exposes public Anki API used by runtime/services.
- `src/anki-integration/runtime.ts`: normalized config state, polling-vs-proxy transport lifecycle, runtime config patch handling.
- `src/anki-integration/card-creation.ts`: sentence/audio card creation and clipboard update flow.
- `src/anki-integration/note-update-workflow.ts`: enrich newly added notes.
- `src/anki-integration/field-grouping.ts`: preview/build helpers for Kiku field grouping.
- `src/anki-integration/field-grouping-workflow.ts`: auto/manual merge execution.
- `src/anki-integration/anki-connect-proxy.ts`: local proxy transport for post-add enrichment.
- `src/anki-integration/known-word-cache.ts`: known-word cache lifecycle and persistence.
## Refactor seam
`AnkiIntegrationRuntime` owns the cluster that previously mixed:
- config normalization/defaulting
- polling vs proxy startup/shutdown
- transport restart decisions during runtime patches
- known-word cache lifecycle toggles tied to config changes
Keep new orchestration work in `runtime.ts` when it changes process-level Anki state. Keep note/card behavior in the workflow/service modules.

View File

@@ -1,50 +0,0 @@
# Character Name Gating Implementation Plan
> **For Claude:** REQUIRED SUB-SKILL: Use superpowers:executing-plans to implement this plan task-by-task.
**Goal:** Disable subtitle character-name lookup/highlighting when the AniList character dictionary feature is disabled, while keeping tokenization and all other annotations working.
**Architecture:** Gate `getNameMatchEnabled` at the runtime-deps boundary used by subtitle tokenization. Keep the tokenizer pipeline intact and only suppress character-name metadata requests when `anilist.characterDictionary.enabled` is false, regardless of `subtitleStyle.nameMatchEnabled`.
**Tech Stack:** TypeScript, Bun test runner, Electron main/runtime wiring.
---
### Task 1: Add runtime gating coverage
**Files:**
- Modify: `src/main/runtime/subtitle-tokenization-main-deps.test.ts`
**Step 1: Write the failing test**
Add a test proving `getNameMatchEnabled()` resolves to `false` when `getCharacterDictionaryEnabled()` is `false` even if `getNameMatchEnabled()` is `true`.
**Step 2: Run test to verify it fails**
Run: `bun test src/main/runtime/subtitle-tokenization-main-deps.test.ts`
Expected: FAIL because the deps builder does not yet combine the two flags.
### Task 2: Implement minimal runtime gate
**Files:**
- Modify: `src/main/runtime/subtitle-tokenization-main-deps.ts`
- Modify: `src/main.ts`
**Step 3: Write minimal implementation**
Add `getCharacterDictionaryEnabled` to the main handler deps and make the built `getNameMatchEnabled` return true only when both the subtitle setting and the character dictionary setting are enabled.
**Step 4: Run tests to verify green**
Run: `bun test src/main/runtime/subtitle-tokenization-main-deps.test.ts`
Expected: PASS.
### Task 3: Verify no regressions in related tokenization seams
**Files:**
- Modify: none unless failures reveal drift
**Step 5: Run focused verification**
Run: `bun test src/core/services/subtitle-processing-controller.test.ts src/main/runtime/subtitle-tokenization-main-deps.test.ts`
Expected: PASS.

View File

@@ -1,155 +0,0 @@
# Immersion SQLite Verification Implementation Plan
> **For Claude:** REQUIRED SUB-SKILL: Use superpowers:executing-plans to implement this plan task-by-task.
**Goal:** Make the SQLite-backed immersion tracking persistence tests visible in the repo's verification surface and reproducible through at least one documented automated command.
**Architecture:** Keep the existing Bun fast lane intact for routine local verification, but add an explicit SQLite verification lane that runs the database-backed immersion tests under a runtime with `node:sqlite` support. Surface unsupported-runtime behavior clearly in the source tests and contributor docs so skipped or omitted coverage is no longer mistaken for a fully green persistence lane.
**Tech Stack:** TypeScript, Bun scripts in `package.json`, Node's built-in `node:test` and `node:sqlite`, GitHub Actions workflows, Markdown docs in `README.md`.
---
### Task 1: Audit and expose the SQLite-backed immersion test surface
**Files:**
- Modify: `src/core/services/immersion-tracker-service.test.ts`
- Modify: `src/core/services/immersion-tracker/storage-session.test.ts`
- Reference: `src/main/runtime/registry.test.ts`
**Step 1: Write the failing test**
Refactor the SQLite-gated immersion tests so missing `node:sqlite` support is reported with an explicit skip reason instead of a silent top-level `test.skip` alias.
**Step 2: Run test to verify it fails**
Run: `bun test src/core/services/immersion-tracker-service.test.ts src/core/services/immersion-tracker/storage-session.test.ts`
Expected: the current output shows generic skips or hides the storage-session suite from normal scripted verification, which is too opaque for contributors.
**Step 3: Write minimal implementation**
Mirror the `src/main/runtime/registry.test.ts` pattern: add a helper that either loads `DatabaseSync` or skips with a message like `requires node:sqlite support in this runtime`, then wrap each SQLite-backed test through that helper.
**Step 4: Run test to verify it passes**
Run: `bun test src/core/services/immersion-tracker-service.test.ts src/core/services/immersion-tracker/storage-session.test.ts`
Expected: PASS, with explicit skip messages in unsupported runtimes.
### Task 2: Add a reproducible SQLite verification command
**Files:**
- Modify: `package.json`
- Reference: `src/core/services/immersion-tracker-service.test.ts`
- Reference: `src/core/services/immersion-tracker/storage-session.test.ts`
**Step 1: Write the failing test**
Add a dedicated script contract for the SQLite-backed immersion verification lane so both persistence-heavy suites are intentionally grouped and runnable together.
**Step 2: Run test to verify it fails**
Run: `bun run test:immersion:sqlite`
Expected: FAIL because no such reproducible lane exists yet.
**Step 3: Write minimal implementation**
Update `package.json` with explicit scripts for the SQLite lane. Prefer a command shape that actually executes the built JS tests under Node with `node:sqlite` support, for example:
- `test:immersion:sqlite:dist`: `node --test dist/core/services/immersion-tracker-service.test.js dist/core/services/immersion-tracker/storage-session.test.js`
- `test:immersion:sqlite`: `bun run build && bun run test:immersion:sqlite:dist`
If build cost or runtime behavior requires a small adjustment, keep the core contract the same: one documented command must run both SQLite-backed immersion suites end-to-end.
**Step 4: Run test to verify it passes**
Run: `bun run test:immersion:sqlite`
Expected: PASS in a Node runtime with `node:sqlite`, executing both persistence suites without Bun-only skips.
### Task 3: Wire the SQLite lane into automated verification
**Files:**
- Modify: `.github/workflows/ci.yml`
- Modify: `.github/workflows/release.yml`
- Reference: `package.json`
**Step 1: Write the failing test**
Add the new SQLite immersion lane to the repo's automated verification so contributors and CI can rely on a real persistence check rather than the Bun fast lane alone.
**Step 2: Run test to verify it fails**
Run: `bun run test:immersion:sqlite`
Expected: local command may pass, but CI/release workflows still omit the lane entirely.
**Step 3: Write minimal implementation**
Update both workflows to provision a Node version with `node:sqlite` support before the SQLite lane runs, then execute `bun run test:immersion:sqlite` in the quality gate after the bundle build produces `dist/**` test files.
**Step 4: Run test to verify it passes**
Run: `bun run test:immersion:sqlite`
Expected: PASS locally, and workflow definitions clearly show the SQLite lane as part of automated verification.
### Task 4: Document contributor-facing prerequisites and commands
**Files:**
- Modify: `README.md`
- Reference: `package.json`
- Reference: `.github/workflows/ci.yml`
**Step 1: Write the failing test**
Extend the verification docs so contributors can discover the SQLite lane, know why the Bun source lane may skip those cases, and understand which command reproduces the persistence coverage.
**Step 2: Run test to verify it fails**
Run: `grep -n "test:immersion:sqlite" README.md`
Expected: FAIL because the dedicated immersion SQLite lane is undocumented.
**Step 3: Write minimal implementation**
Update `README.md` to document:
- the Bun fast/default lane versus the SQLite persistence lane
- the `node:sqlite` prerequisite for the reproducible command
- that the dedicated lane covers session persistence/finalization behavior beyond seam tests
**Step 4: Run test to verify it passes**
Run: `grep -n "test:immersion:sqlite" README.md && grep -n "node:sqlite" README.md`
Expected: PASS, with clear contributor guidance.
### Task 5: Verify persistence coverage end-to-end
**Files:**
- Test: `src/core/services/immersion-tracker-service.test.ts`
- Test: `src/core/services/immersion-tracker/storage-session.test.ts`
- Reference: `README.md`
- Reference: `package.json`
**Step 1: Write the failing test**
Prove the final lane exercises real DB-backed persistence/finalization paths, not just the seam tests.
**Step 2: Run test to verify it fails**
Run: `bun run test:immersion:sqlite`
Expected: before implementation, the command does not exist or does not cover both SQLite-backed suites.
**Step 3: Write minimal implementation**
Keep the dedicated lane pointed at both existing SQLite-backed test files so it covers representative finalization and persistence behavior such as:
- `destroy finalizes active session and persists final telemetry`
- `start/finalize session updates ended_at and status`
- `executeQueuedWrite inserts event and telemetry rows`
**Step 4: Run test to verify it passes**
Run: `bun run test:immersion:sqlite`
Expected: PASS, with those DB-backed persistence/finalization cases executing successfully under Node.

View File

@@ -1,92 +0,0 @@
# Merged Character Dictionary Implementation Plan
> **For Claude:** REQUIRED SUB-SKILL: Use superpowers:executing-plans to implement this plan task-by-task.
**Goal:** Replace per-anime character dictionary imports with one merged Yomitan dictionary driven by MRU usage retention.
**Architecture:** Persist normalized per-media character dictionary snapshots locally, maintain MRU retained media ids in auto-sync state, and rebuild a single merged Yomitan zip only when the retained set changes. Keep external AniList fetches only for media without a local snapshot; normal revisits stay local.
**Tech Stack:** TypeScript, Bun test, Node fs/path, existing Yomitan zip generation helpers.
---
### Task 1: Lock in merged auto-sync behavior
**Files:**
- Modify: `src/main/runtime/character-dictionary-auto-sync.test.ts`
- Test: `src/main/runtime/character-dictionary-auto-sync.test.ts`
**Step 1: Write the failing test**
Add tests for:
- single merged dictionary title/import replacing per-media imports
- MRU reorder causing rebuild only when order changes
- unchanged revisit skipping rebuild/import
- capped retained set evicting least-recently-used media
**Step 2: Run test to verify it fails**
Run: `bun test src/main/runtime/character-dictionary-auto-sync.test.ts`
Expected: FAIL on old per-media import assumptions / missing merged behavior
**Step 3: Write minimal implementation**
Update auto-sync runtime to track retained media ids and merged revision/hash, call merged zip builder, and replace one imported Yomitan dictionary.
**Step 4: Run test to verify it passes**
Run: `bun test src/main/runtime/character-dictionary-auto-sync.test.ts`
Expected: PASS
### Task 2: Add snapshot + merged-zip runtime support
**Files:**
- Modify: `src/main/character-dictionary-runtime.ts`
- Modify: `src/main/character-dictionary-runtime.test.ts`
- Test: `src/main/character-dictionary-runtime.test.ts`
**Step 1: Write the failing test**
Add tests for:
- saving/loading normalized per-media snapshots without per-media zip cache
- building merged zip from retained media snapshots with stable dictionary title
- preserving images/terms from multiple media in merged output
**Step 2: Run test to verify it fails**
Run: `bun test src/main/character-dictionary-runtime.test.ts`
Expected: FAIL because snapshot/merged APIs do not exist yet
**Step 3: Write minimal implementation**
Refactor dictionary runtime to expose snapshot generation/loading and merged zip building from stored metadata/images.
**Step 4: Run test to verify it passes**
Run: `bun test src/main/character-dictionary-runtime.test.ts`
Expected: PASS
### Task 3: Wire app/runtime config and docs
**Files:**
- Modify: `src/main.ts`
- Modify: `src/config/definitions/options-integrations.ts`
- Modify: `README.md`
**Step 1: Write the failing test**
Add or update tests if needed for new dependency wiring / docs-adjacent config description expectations.
**Step 2: Run test to verify it fails**
Run: `bun test src/main/runtime/character-dictionary-auto-sync.test.ts src/main/character-dictionary-runtime.test.ts`
Expected: FAIL until wiring matches merged flow
**Step 3: Write minimal implementation**
Swap app wiring to new snapshot + merged build API, update config/docs text from TTL semantics to usage-based merged retention.
**Step 4: Run test to verify it passes**
Run: `bun test src/main/runtime/character-dictionary-auto-sync.test.ts src/main/character-dictionary-runtime.test.ts && bun run tsc --noEmit`
Expected: PASS

View File

@@ -1,121 +0,0 @@
# Subtitle Sync Verification Implementation Plan
> **For Claude:** REQUIRED SUB-SKILL: Use superpowers:executing-plans to implement this plan task-by-task.
**Goal:** Replace the no-op `test:subtitle` lane with real automated subtitle-sync verification that reuses the maintained subsync tests and documents the real contributor workflow.
**Architecture:** Repoint the subtitle verification command at the existing source-level subsync tests instead of inventing a second hidden suite. Add one focused ffsubsync failure-path test so the subtitle lane explicitly covers both engines plus a non-happy path, then update contributor docs to describe the dedicated subtitle lane and how it relates to `test:core`.
**Tech Stack:** TypeScript, Bun test, Node test/assert, npm package scripts, Markdown docs.
---
### Task 1: Lock subtitle lane to real subsync tests
**Files:**
- Modify: `package.json`
**Step 1: Write the failing test**
Define the intended command shape first: `test:subtitle:src` should run `src/core/services/subsync.test.ts` and `src/subsync/utils.test.ts`, `test:subtitle` should invoke that real source lane, and no placeholder echo should remain.
**Step 2: Run test to verify it fails**
Run: `bun run test:subtitle`
Expected: It performs a build and prints `Subtitle tests are currently not configured`, proving the lane is still a no-op.
**Step 3: Write minimal implementation**
Update `package.json` so:
- `test:subtitle:src` runs `bun test src/core/services/subsync.test.ts src/subsync/utils.test.ts`
- `test:subtitle` runs the new source lane directly
- `test:subtitle:dist` is removed if it is no longer the real verification path
**Step 4: Run test to verify it passes**
Run: `bun run test:subtitle`
Expected: PASS with Bun executing the real subtitle-sync test files.
### Task 2: Add explicit ffsubsync non-happy-path coverage
**Files:**
- Modify: `src/core/services/subsync.test.ts`
- Test: `src/core/services/subsync.test.ts`
**Step 1: Write the failing test**
Add a test that runs `runSubsyncManual({ engine: 'ffsubsync' })` with a stub ffsubsync executable that exits non-zero and writes stderr, then assert:
- `result.ok === false`
- `result.message` starts with `ffsubsync synchronization failed`
- the failure message includes command details surfaced to the user
**Step 2: Run test to verify it fails**
Run: `bun test src/core/services/subsync.test.ts`
Expected: FAIL because ffsubsync failure propagation is not asserted yet.
**Step 3: Write minimal implementation**
Keep production code unchanged unless the new test exposes a real bug. If needed, tighten failure assertions or message propagation in `src/core/services/subsync.ts` without changing successful behavior.
**Step 4: Run test to verify it passes**
Run: `bun test src/core/services/subsync.test.ts`
Expected: PASS with both alass and ffsubsync paths covered, including a non-happy path.
### Task 3: Make contributor docs match the real verification path
**Files:**
- Modify: `README.md`
- Modify: `package.json`
**Step 1: Write the failing test**
Use the repository state as the failure signal: README currently advertises subtitle sync as a feature but does not tell contributors that `bun run test:subtitle` is the real verification lane.
**Step 2: Run test to verify it fails**
Run: `bun run test:subtitle && bun test src/subsync/utils.test.ts`
Expected: Tests pass, but docs still do not explain the lane; this is the remaining acceptance-criteria gap.
**Step 3: Write minimal implementation**
Update `README.md` with a short contributor-facing verification note that:
- points to `bun run test:subtitle` for subtitle-sync coverage
- states that the lane reuses the maintained subsync tests already included in broader core coverage
- avoids implying there is a separate hidden subtitle test harness beyond those tests
**Step 4: Run test to verify it passes**
Run: `bun run test:subtitle`
Expected: PASS, with docs and scripts now aligned around the same subtitle verification strategy.
### Task 4: Verify matrix integration stays clean
**Files:**
- Modify: `package.json` (only if Task 1/3 exposed cleanup needs)
**Step 1: Write the failing test**
Treat duplication as the failure condition: confirm the dedicated subtitle lane reuses the same maintained files already present in `test:core:src` rather than creating a second divergent suite.
**Step 2: Run test to verify it fails**
Run: `bun run test:subtitle && bun run test:core:src`
Expected: If file lists diverge unexpectedly, this review step exposes it before handoff.
**Step 3: Write minimal implementation**
If needed, do the smallest script cleanup necessary so subtitle coverage remains explicit without hiding or duplicating existing core coverage.
**Step 4: Run test to verify it passes**
Run: `bun run test:subtitle && bun run test:core:src`
Expected: PASS, confirming the dedicated lane and the broader core suite agree on subtitle coverage.

View File

@@ -1,169 +0,0 @@
# Testing Workflow Test Matrix Implementation Plan
> **For Claude:** REQUIRED SUB-SKILL: Use superpowers:executing-plans to implement this plan task-by-task.
**Goal:** Make the standard test commands reflect the maintained test surface so newly added tests are discovered automatically or intentionally documented outside the default lane.
**Architecture:** Replace the current hand-maintained file allowlists in `package.json` with directory-based Bun test lanes that map to maintained test surfaces. Keep the default developer lane fast, move slower or environment-specific checks into explicit commands, and document the resulting matrix in `README.md` so contributors know exactly which command to run.
**Tech Stack:** TypeScript, Bun test, npm-style package scripts in `package.json`, Markdown docs in `README.md`.
---
### Task 1: Lock in the desired script matrix with failing tests/audit checks
**Files:**
- Modify: `package.json`
- Test: `package.json`
- Reference: `src/main-entry-runtime.test.ts`
- Reference: `src/anki-integration/anki-connect-proxy.test.ts`
- Reference: `src/main/runtime/registry.test.ts`
**Step 1: Write the failing test**
Define the target script layout in `package.json` by editing the script map so these lanes exist:
- `test:fast` for default fast verification
- `test:full` for the maintained source test surface
- `test:env` for environment-specific checks
The fast lane should stay selective and intentional. The full lane should use directory-based discovery rather than file-by-file allowlists, with representative coverage from:
- `src/main-entry-runtime.test.ts`
- `src/anki-integration/**/*.test.ts`
- `src/main/**/*.test.ts`
- `launcher/**/*.test.ts`
**Step 2: Run test to verify it fails**
Run: `bun run test:full`
Expected: FAIL because `test:full` does not exist yet, and previously omitted maintained tests are still outside the standard matrix.
**Step 3: Write minimal implementation**
Update `package.json` scripts so:
- `test` points at `test:fast`
- `test:fast` runs the fast default lane only
- `test:full` runs directory-based maintained suites instead of file allowlists
- `test:env` runs environment-specific verification (for example launcher/plugin and sqlite-gated suites)
- subsystem scripts use stable path globs or directory arguments so new tests are discovered automatically
Prefer commands like these, adjusted only as needed for Bun behavior in this repo:
- `bun test src/config/**/*.test.ts`
- `bun test src/{cli,core,renderer,subtitle,subsync,main,anki-integration}/*.test.ts ...` only if Bun cannot take the broader directory directly
- `bun test launcher/**/*.test.ts`
Do not keep large hand-maintained file enumerations for maintained unit/integration lanes.
**Step 4: Run test to verify it passes**
Run: `bun run test:full`
Expected: PASS, including automated execution of representative tests that were previously omitted from the standard matrix.
### Task 2: Separate environment-specific verification from the maintained default/full lanes
**Files:**
- Modify: `package.json`
- Test: `src/main/runtime/registry.test.ts`
- Test: `launcher/smoke.e2e.test.ts`
- Test: `src/core/services/immersion-tracker-service.test.ts`
**Step 1: Write the failing test**
Refine the package scripts so environment-specific checks are explicitly grouped outside the default fast lane. Treat these as the primary environment-specific examples unless repo behavior proves a better split during execution:
- launcher smoke/plugin checks that rely on local process or Lua execution
- sqlite-dependent checks that may skip when `node:sqlite` is unavailable
**Step 2: Run test to verify it fails**
Run: `bun run test:env`
Expected: FAIL because the environment-specific lane is not defined yet.
**Step 3: Write minimal implementation**
Add explicit environment-specific scripts in `package.json`, such as:
- a launcher/plugin lane that runs `launcher/smoke.e2e.test.ts` plus `lua scripts/test-plugin-start-gate.lua`
- a sqlite lane for tests that require `node:sqlite` support or otherwise need environment notes
- an aggregate `test:env` command that runs all environment-specific lanes
Keep these lanes documented and reproducible rather than silently excluded.
**Step 4: Run test to verify it passes**
Run: `bun run test:env`
Expected: PASS in supported environments, or clear documented skip behavior where the tests themselves intentionally gate on missing runtime support.
### Task 3: Document contributor-facing test commands and matrix
**Files:**
- Modify: `README.md`
- Reference: `package.json`
**Step 1: Write the failing test**
Specify the requirements for a contributor-focused testing section in `README.md`:
- fast verification command
- full verification command
- environment-specific verification command
- plain-language explanation of which suites each lane covers and why
**Step 2: Run test to verify it fails**
Run: `grep -n "Testing" README.md`
Expected: no contributor testing matrix section exists yet.
**Step 3: Write minimal implementation**
Update `README.md` with a concise `Testing` section that documents:
- `bun run test` / `bun run test:fast` for fast local verification
- `bun run test:full` for the maintained source test surface
- `bun run test:env` for environment-specific verification
- any important notes about sqlite-gated tests and launcher/plugin checks
Keep the matrix concrete and reproducible.
**Step 4: Run test to verify it passes**
Run: `grep -n "Testing" README.md && grep -n "test:full" README.md && grep -n "test:env" README.md`
Expected: PASS with the new contributor-facing matrix present.
### Task 4: Verify representative omitted suites now belong to automated lanes
**Files:**
- Test: `src/main-entry-runtime.test.ts`
- Test: `src/anki-integration/anki-connect-proxy.test.ts`
- Test: `src/main/runtime/registry.test.ts`
- Reference: `package.json`
- Reference: `README.md`
**Step 1: Write the failing test**
Use targeted command checks to prove these previously omitted surfaces are now in the matrix:
- entry/runtime: `src/main-entry-runtime.test.ts`
- Anki integration: `src/anki-integration/anki-connect-proxy.test.ts`
- main runtime: `src/main/runtime/registry.test.ts`
**Step 2: Run test to verify it fails**
Run: `bun run test:full src/main-entry-runtime.test.ts`
Expected: either unsupported invocation or evidence that the current matrix still does not include these surfaces automatically.
**Step 3: Write minimal implementation**
Adjust the final script paths/globs until the full matrix includes those representative surfaces without file-by-file script maintenance.
**Step 4: Run test to verify it passes**
Run: `bun test src/main-entry-runtime.test.ts src/anki-integration/anki-connect-proxy.test.ts src/main/runtime/registry.test.ts && bun run test:fast && bun run test:full`
Expected: PASS, with at least one representative test from each required surface executing through the documented automated lanes.

View File

@@ -16,7 +16,11 @@ import { generateYoutubeSubtitles } from '../youtube.js';
import type { Args } from '../types.js';
import type { LauncherCommandContext } from './context.js';
import { ensureLauncherSetupReady } from '../setup-gate.js';
import { getDefaultConfigDir, getSetupStatePath, readSetupState } from '../../src/shared/setup-state.js';
import {
getDefaultConfigDir,
getSetupStatePath,
readSetupState,
} from '../../src/shared/setup-state.js';
const SETUP_WAIT_TIMEOUT_MS = 10 * 60 * 1000;
const SETUP_POLL_INTERVAL_MS = 500;

View File

@@ -16,6 +16,8 @@
"build:renderer": "esbuild src/renderer/renderer.ts --bundle --platform=browser --format=esm --target=es2022 --outfile=dist/renderer/renderer.js --sourcemap",
"format": "prettier --write .",
"format:check": "prettier --check .",
"format:src": "bash scripts/prettier-scope.sh --write",
"format:check:src": "bash scripts/prettier-scope.sh --check",
"test:config:src": "bun test src/config/config.test.ts src/config/path-resolution.test.ts src/config/resolve/anki-connect.test.ts src/config/resolve/subtitle-style.test.ts src/config/resolve/jellyfin.test.ts src/config/definitions/domain-registry.test.ts src/generate-config-example.test.ts",
"test:config:dist": "bun test dist/config/config.test.js dist/config/path-resolution.test.js dist/config/resolve/anki-connect.test.js dist/config/resolve/subtitle-style.test.js dist/config/resolve/jellyfin.test.js dist/config/definitions/domain-registry.test.js dist/generate-config-example.test.js",
"test:config:smoke:dist": "bun test dist/config/path-resolution.test.js",

20
scripts/prettier-scope.sh Normal file
View File

@@ -0,0 +1,20 @@
#!/usr/bin/env bash
# Run Prettier over the maintained source scope only.
# All caller flags (e.g. --write or --check) are forwarded ahead of the paths.
set -euo pipefail

# Resolve the repository root relative to this script so it works from any CWD.
repo_root="$(cd "$(dirname "${BASH_SOURCE[0]}")/.." && pwd)"
cd "$repo_root"

# Files and directories Prettier is allowed to touch.
scoped_paths=(
  "package.json"
  "tsconfig.json"
  "tsconfig.renderer.json"
  "tsconfig.typecheck.json"
  ".prettierrc.json"
  ".github"
  "build"
  "launcher"
  "scripts"
  "src"
)

# Replace the shell with Prettier: forwarded flags first, then the scope list.
exec bunx prettier "$@" "${scoped_paths[@]}"

View File

@@ -55,7 +55,10 @@ test('AnkiIntegrationRuntime normalizes url and proxy defaults', () => {
assert.equal(normalized.proxy?.host, '0.0.0.0');
assert.equal(normalized.proxy?.port, 7001);
assert.equal(normalized.proxy?.upstreamUrl, 'http://anki.local:8765');
assert.equal(normalized.media?.fallbackDuration, DEFAULT_ANKI_CONNECT_CONFIG.media.fallbackDuration);
assert.equal(
normalized.media?.fallbackDuration,
DEFAULT_ANKI_CONNECT_CONFIG.media.fallbackDuration,
);
});
test('AnkiIntegrationRuntime starts proxy transport when proxy mode is enabled', () => {
@@ -70,10 +73,7 @@ test('AnkiIntegrationRuntime starts proxy transport when proxy mode is enabled',
runtime.start();
assert.deepEqual(calls, [
'known:start',
'proxy:start:127.0.0.1:9999:http://upstream:8765',
]);
assert.deepEqual(calls, ['known:start', 'proxy:start:127.0.0.1:9999:http://upstream:8765']);
});
test('AnkiIntegrationRuntime switches transports and clears known words when runtime patch disables highlighting', () => {

View File

@@ -31,8 +31,7 @@ function trimToNonEmptyString(value: unknown): string | null {
}
export function normalizeAnkiIntegrationConfig(config: AnkiConnectConfig): AnkiConnectConfig {
const resolvedUrl =
trimToNonEmptyString(config.url) ?? DEFAULT_ANKI_CONNECT_CONFIG.url;
const resolvedUrl = trimToNonEmptyString(config.url) ?? DEFAULT_ANKI_CONNECT_CONFIG.url;
const proxySource =
config.proxy && typeof config.proxy === 'object'
? (config.proxy as NonNullable<AnkiConnectConfig['proxy']>)

View File

@@ -166,9 +166,7 @@ test('parses texthooker.launchAtStartup and warns on invalid values', () => {
DEFAULT_CONFIG.texthooker.launchAtStartup,
);
assert.ok(
invalidService
.getWarnings()
.some((warning) => warning.path === 'texthooker.launchAtStartup'),
invalidService.getWarnings().some((warning) => warning.path === 'texthooker.launchAtStartup'),
);
});
@@ -211,14 +209,10 @@ test('parses annotationWebsocket settings and warns on invalid values', () => {
DEFAULT_CONFIG.annotationWebsocket.port,
);
assert.ok(
invalidService
.getWarnings()
.some((warning) => warning.path === 'annotationWebsocket.enabled'),
invalidService.getWarnings().some((warning) => warning.path === 'annotationWebsocket.enabled'),
);
assert.ok(
invalidService
.getWarnings()
.some((warning) => warning.path === 'annotationWebsocket.port'),
invalidService.getWarnings().some((warning) => warning.path === 'annotationWebsocket.port'),
);
});
@@ -350,8 +344,8 @@ test('parses subtitleStyle.nameMatchColor and warns on invalid values', () => {
const validService = new ConfigService(validDir);
assert.equal(
((validService.getConfig().subtitleStyle as unknown as Record<string, unknown>).nameMatchColor ??
null) as string | null,
((validService.getConfig().subtitleStyle as unknown as Record<string, unknown>)
.nameMatchColor ?? null) as string | null,
'#eed49f',
);
@@ -373,9 +367,7 @@ test('parses subtitleStyle.nameMatchColor and warns on invalid values', () => {
'#f5bde6',
);
assert.ok(
invalidService
.getWarnings()
.some((warning) => warning.path === 'subtitleStyle.nameMatchColor'),
invalidService.getWarnings().some((warning) => warning.path === 'subtitleStyle.nameMatchColor'),
);
});
@@ -505,10 +497,16 @@ test('parses anilist.characterDictionary config with clamping and enum validatio
assert.equal(config.anilist.characterDictionary.maxLoaded, 20);
assert.equal(config.anilist.characterDictionary.evictionPolicy, 'delete');
assert.equal(config.anilist.characterDictionary.profileScope, 'all');
assert.ok(warnings.some((warning) => warning.path === 'anilist.characterDictionary.refreshTtlHours'));
assert.ok(
warnings.some((warning) => warning.path === 'anilist.characterDictionary.refreshTtlHours'),
);
assert.ok(warnings.some((warning) => warning.path === 'anilist.characterDictionary.maxLoaded'));
assert.ok(warnings.some((warning) => warning.path === 'anilist.characterDictionary.evictionPolicy'));
assert.ok(warnings.some((warning) => warning.path === 'anilist.characterDictionary.profileScope'));
assert.ok(
warnings.some((warning) => warning.path === 'anilist.characterDictionary.evictionPolicy'),
);
assert.ok(
warnings.some((warning) => warning.path === 'anilist.characterDictionary.profileScope'),
);
});
test('parses anilist.characterDictionary.collapsibleSections booleans and warns on invalid values', () => {

View File

@@ -175,7 +175,8 @@ export function buildIntegrationConfigOptionRegistry(
path: 'anilist.characterDictionary.collapsibleSections.description',
kind: 'boolean',
defaultValue: defaultConfig.anilist.characterDictionary.collapsibleSections.description,
description: 'Open the Description section by default in character dictionary glossary entries.',
description:
'Open the Description section by default in character dictionary glossary entries.',
},
{
path: 'anilist.characterDictionary.collapsibleSections.characterInformation',
@@ -189,7 +190,8 @@ export function buildIntegrationConfigOptionRegistry(
path: 'anilist.characterDictionary.collapsibleSections.voicedBy',
kind: 'boolean',
defaultValue: defaultConfig.anilist.characterDictionary.collapsibleSections.voicedBy,
description: 'Open the Voiced by section by default in character dictionary glossary entries.',
description:
'Open the Voiced by section by default in character dictionary glossary entries.',
},
{
path: 'jellyfin.enabled',

View File

@@ -238,7 +238,9 @@ export function applySubtitleDomainConfig(context: ResolveContext): void {
);
if (nameMatchEnabled !== undefined) {
resolved.subtitleStyle.nameMatchEnabled = nameMatchEnabled;
} else if ((src.subtitleStyle as { nameMatchEnabled?: unknown }).nameMatchEnabled !== undefined) {
} else if (
(src.subtitleStyle as { nameMatchEnabled?: unknown }).nameMatchEnabled !== undefined
) {
resolved.subtitleStyle.nameMatchEnabled = fallbackSubtitleStyleNameMatchEnabled;
warn(
'subtitleStyle.nameMatchEnabled',

View File

@@ -99,8 +99,7 @@ test('runAppReadyRuntime starts texthooker on startup when enabled in config', a
calls.indexOf('createMpvClient') < calls.indexOf('startTexthooker:5174:ws://127.0.0.1:6678'),
);
assert.ok(
calls.indexOf('startTexthooker:5174:ws://127.0.0.1:6678') <
calls.indexOf('handleInitialArgs'),
calls.indexOf('startTexthooker:5174:ws://127.0.0.1:6678') < calls.indexOf('handleInitialArgs'),
);
});

View File

@@ -261,7 +261,8 @@ export function handleCliCommand(
const ignoreSecondInstanceStart =
source === 'second-instance' && args.start && deps.isOverlayRuntimeInitialized();
const shouldStart = (!ignoreSecondInstanceStart && args.start) || args.toggle || args.toggleVisibleOverlay;
const shouldStart =
(!ignoreSecondInstanceStart && args.start) || args.toggle || args.toggleVisibleOverlay;
const needsOverlayRuntime = commandNeedsOverlayRuntime(args);
const shouldInitializeOverlayRuntime = needsOverlayRuntime || args.start;

View File

@@ -38,6 +38,7 @@ function createOptions(overrides: Partial<Parameters<typeof handleMpvCommandFrom
mpvSendCommand: (command) => {
sentCommands.push(command);
},
resolveProxyCommandOsd: async () => null,
isMpvConnected: () => true,
hasRuntimeOptionsManager: () => true,
...overrides,
@@ -52,30 +53,39 @@ test('handleMpvCommandFromIpc forwards regular mpv commands', () => {
assert.deepEqual(osd, []);
});
test('handleMpvCommandFromIpc emits osd for subtitle position keybinding proxies', () => {
test('handleMpvCommandFromIpc emits osd for subtitle position keybinding proxies', async () => {
const { options, sentCommands, osd } = createOptions();
handleMpvCommandFromIpc(['add', 'sub-pos', 1], options);
await new Promise((resolve) => setImmediate(resolve));
assert.deepEqual(sentCommands, [['add', 'sub-pos', 1]]);
assert.deepEqual(osd, ['Subtitle position: ${sub-pos}']);
});
test('handleMpvCommandFromIpc emits osd for primary subtitle track keybinding proxies', () => {
const { options, sentCommands, osd } = createOptions();
test('handleMpvCommandFromIpc emits resolved osd for primary subtitle track keybinding proxies', async () => {
const { options, sentCommands, osd } = createOptions({
resolveProxyCommandOsd: async () => 'Subtitle track: Internal #3 - Japanese (active)',
});
handleMpvCommandFromIpc(['cycle', 'sid'], options);
await new Promise((resolve) => setImmediate(resolve));
assert.deepEqual(sentCommands, [['cycle', 'sid']]);
assert.deepEqual(osd, ['Subtitle track: ${sid}']);
assert.deepEqual(osd, ['Subtitle track: Internal #3 - Japanese (active)']);
});
test('handleMpvCommandFromIpc emits osd for secondary subtitle track keybinding proxies', () => {
const { options, sentCommands, osd } = createOptions();
test('handleMpvCommandFromIpc emits resolved osd for secondary subtitle track keybinding proxies', async () => {
const { options, sentCommands, osd } = createOptions({
resolveProxyCommandOsd: async () =>
'Secondary subtitle track: External #8 - English Commentary',
});
handleMpvCommandFromIpc(['set_property', 'secondary-sid', 'auto'], options);
await new Promise((resolve) => setImmediate(resolve));
assert.deepEqual(sentCommands, [['set_property', 'secondary-sid', 'auto']]);
assert.deepEqual(osd, ['Secondary subtitle track: ${secondary-sid}']);
assert.deepEqual(osd, ['Secondary subtitle track: External #8 - English Commentary']);
});
test('handleMpvCommandFromIpc emits osd for subtitle delay keybinding proxies', () => {
test('handleMpvCommandFromIpc emits osd for subtitle delay keybinding proxies', async () => {
const { options, sentCommands, osd } = createOptions();
handleMpvCommandFromIpc(['add', 'sub-delay', 0.1], options);
await new Promise((resolve) => setImmediate(resolve));
assert.deepEqual(sentCommands, [['add', 'sub-delay', 0.1]]);
assert.deepEqual(osd, ['Subtitle delay: ${sub-delay}']);
});

View File

@@ -23,6 +23,7 @@ export interface HandleMpvCommandFromIpcOptions {
mpvPlayNextSubtitle: () => void;
shiftSubDelayToAdjacentSubtitle: (direction: 'next' | 'previous') => Promise<void>;
mpvSendCommand: (command: (string | number)[]) => void;
resolveProxyCommandOsd?: (command: (string | number)[]) => Promise<string | null>;
isMpvConnected: () => boolean;
hasRuntimeOptionsManager: () => boolean;
}
@@ -36,7 +37,7 @@ const MPV_PROPERTY_COMMANDS = new Set([
'multiply',
]);
function resolveProxyCommandOsd(command: (string | number)[]): string | null {
function resolveProxyCommandOsdTemplate(command: (string | number)[]): string | null {
const operation = typeof command[0] === 'string' ? command[0] : '';
const property = typeof command[1] === 'string' ? command[1] : '';
if (!MPV_PROPERTY_COMMANDS.has(operation)) return null;
@@ -55,6 +56,25 @@ function resolveProxyCommandOsd(command: (string | number)[]): string | null {
return null;
}
/**
 * Emit an OSD message for a proxied mpv property command.
 *
 * Looks up the OSD template for the command; when none applies, nothing is
 * shown. Otherwise the optional async resolver is consulted for a concrete
 * label (e.g. a subtitle-track name), falling back to the raw template when
 * the resolver is absent, returns an empty value, or throws.
 *
 * @param command - The mpv command tuple that was forwarded (e.g. ['cycle', 'sid']).
 * @param options - IPC handler dependencies providing the resolver and OSD sink.
 */
function showResolvedProxyCommandOsd(
  command: (string | number)[],
  options: HandleMpvCommandFromIpcOptions,
): void {
  const template = resolveProxyCommandOsdTemplate(command);
  if (!template) return;
  // Fire-and-forget: OSD display must never block or fail command handling.
  void (async () => {
    try {
      const resolved = await options.resolveProxyCommandOsd?.(command);
      options.showMpvOsd(resolved || template);
    } catch {
      options.showMpvOsd(template);
    }
  })();
}
export function handleMpvCommandFromIpc(
command: (string | number)[],
options: HandleMpvCommandFromIpcOptions,
@@ -103,10 +123,7 @@ export function handleMpvCommandFromIpc(
options.mpvPlayNextSubtitle();
} else {
options.mpvSendCommand(command);
const osd = resolveProxyCommandOsd(command);
if (osd) {
options.showMpvOsd(osd);
}
showResolvedProxyCommandOsd(command, options);
}
}
}

View File

@@ -22,6 +22,22 @@ test('showMpvOsdRuntime sends show-text when connected', () => {
assert.deepEqual(commands, [['show-text', 'hello', '3000']]);
});
test('showMpvOsdRuntime enables property expansion for placeholder-based messages', () => {
  // Record every command the fake mpv client receives.
  const received: (string | number)[][] = [];
  showMpvOsdRuntime(
    {
      connected: true,
      send: ({ command }) => {
        received.push(command);
      },
    },
    'Subtitle delay: ${sub-delay}',
  );
  // Messages containing ${...} must be routed through expand-properties so mpv
  // substitutes the live property value before rendering the OSD text.
  assert.deepEqual(received, [
    ['expand-properties', 'show-text', 'Subtitle delay: ${sub-delay}', '3000'],
  ]);
});
test('showMpvOsdRuntime logs fallback when disconnected', () => {
const logs: string[] = [];
showMpvOsdRuntime(

View File

@@ -53,7 +53,10 @@ export function showMpvOsdRuntime(
fallbackLog: (text: string) => void = (line) => logger.info(line),
): void {
if (mpvClient && mpvClient.connected) {
mpvClient.send({ command: ['show-text', text, '3000'] });
const command = text.includes('${')
? ['expand-properties', 'show-text', text, '3000']
: ['show-text', text, '3000'];
mpvClient.send({ command });
return;
}
fallbackLog(`OSD (MPV not connected): ${text}`);

View File

@@ -12,7 +12,7 @@ test('injectTexthookerBootstrapHtml injects websocket bootstrap before head clos
/window\.localStorage\.setItem\('bannou-texthooker-websocketUrl', "ws:\/\/127\.0\.0\.1:6678"\)/,
);
assert.ok(actual.indexOf('</script></head>') !== -1);
assert.ok(actual.includes("bannou-texthooker-websocketUrl"));
assert.ok(actual.includes('bannou-texthooker-websocketUrl'));
assert.ok(!actual.includes('bannou-texthooker-enableKnownWordColoring'));
assert.ok(!actual.includes('bannou-texthooker-enableNPlusOneColoring'));
assert.ok(!actual.includes('bannou-texthooker-enableNameMatchColoring'));

View File

@@ -764,11 +764,9 @@ test('requestYomitanScanTokens skips fallback fragments without exact primary so
});
});
const result = await requestYomitanScanTokens(
'だが それでも届かぬ高みがあった',
deps,
{ error: () => undefined },
);
const result = await requestYomitanScanTokens('だが それでも届かぬ高みがあった', deps, {
error: () => undefined,
});
assert.deepEqual(
result?.map((token) => ({
@@ -875,7 +873,8 @@ test('dictionary settings helpers upsert and remove dictionary entries without r
const upsertScript = scripts.find(
(script) =>
script.includes('setAllSettings') && script.includes('"SubMiner Character Dictionary (AniList 1)"'),
script.includes('setAllSettings') &&
script.includes('"SubMiner Character Dictionary (AniList 1)"'),
);
assert.ok(upsertScript);
const jitendexOffset = upsertScript?.indexOf('"Jitendex"') ?? -1;
@@ -915,9 +914,18 @@ test('importYomitanDictionaryFromZip uses settings automation bridge instead of
});
assert.equal(imported, true);
assert.equal(scripts.some((script) => script.includes('__subminerYomitanSettingsAutomation')), true);
assert.equal(scripts.some((script) => script.includes('importDictionaryArchiveBase64')), true);
assert.equal(scripts.some((script) => script.includes('subminerImportDictionary')), false);
assert.equal(
scripts.some((script) => script.includes('__subminerYomitanSettingsAutomation')),
true,
);
assert.equal(
scripts.some((script) => script.includes('importDictionaryArchiveBase64')),
true,
);
assert.equal(
scripts.some((script) => script.includes('subminerImportDictionary')),
false,
);
});
test('deleteYomitanDictionaryByTitle uses settings automation bridge instead of custom backend action', async () => {
@@ -947,7 +955,16 @@ test('deleteYomitanDictionaryByTitle uses settings automation bridge instead of
);
assert.equal(deleted, true);
assert.equal(scripts.some((script) => script.includes('__subminerYomitanSettingsAutomation')), true);
assert.equal(scripts.some((script) => script.includes('deleteDictionary')), true);
assert.equal(scripts.some((script) => script.includes('subminerDeleteDictionary')), false);
assert.equal(
scripts.some((script) => script.includes('__subminerYomitanSettingsAutomation')),
true,
);
assert.equal(
scripts.some((script) => script.includes('deleteDictionary')),
true,
);
assert.equal(
scripts.some((script) => script.includes('subminerDeleteDictionary')),
false,
);
});

View File

@@ -562,9 +562,7 @@ async function createYomitanExtensionWindow(
});
return window;
} catch (err) {
logger.error(
`Failed to create hidden Yomitan ${pageName} window: ${(err as Error).message}`,
);
logger.error(`Failed to create hidden Yomitan ${pageName} window: ${(err as Error).message}`);
if (!window.isDestroyed()) {
window.destroy();
}
@@ -1043,13 +1041,15 @@ export async function requestYomitanScanTokens(
}
if (Array.isArray(rawResult)) {
const selectedTokens = selectYomitanParseTokens(rawResult, () => false, 'headword');
return selectedTokens?.map((token) => ({
surface: token.surface,
reading: token.reading,
headword: token.headword,
startPos: token.startPos,
endPos: token.endPos,
})) ?? null;
return (
selectedTokens?.map((token) => ({
surface: token.surface,
reading: token.reading,
headword: token.headword,
startPos: token.startPos,
endPos: token.endPos,
})) ?? null
);
}
return null;
} catch (err) {
@@ -1523,7 +1523,12 @@ export async function getYomitanDictionaryInfo(
deps: YomitanParserRuntimeDeps,
logger: LoggerLike,
): Promise<YomitanDictionaryInfo[]> {
const result = await invokeYomitanBackendAction<unknown>('getDictionaryInfo', undefined, deps, logger);
const result = await invokeYomitanBackendAction<unknown>(
'getDictionaryInfo',
undefined,
deps,
logger,
);
if (!Array.isArray(result)) {
return [];
}
@@ -1546,7 +1551,12 @@ export async function getYomitanSettingsFull(
deps: YomitanParserRuntimeDeps,
logger: LoggerLike,
): Promise<Record<string, unknown> | null> {
const result = await invokeYomitanBackendAction<unknown>('optionsGetFull', undefined, deps, logger);
const result = await invokeYomitanBackendAction<unknown>(
'optionsGetFull',
undefined,
deps,
logger,
);
return isObject(result) ? result : null;
}
@@ -1653,7 +1663,7 @@ export async function upsertYomitanDictionarySettings(
(entry) =>
isObject(entry) &&
typeof (entry as { name?: unknown }).name === 'string' &&
((entry as { name: string }).name.trim() === normalizedTitle),
(entry as { name: string }).name.trim() === normalizedTitle,
);
if (existingIndex >= 0) {

View File

@@ -90,7 +90,10 @@ export function shouldCopyYomitanExtension(sourceDir: string, targetDir: string)
return sourceHash === null || targetHash === null || sourceHash !== targetHash;
}
export function ensureExtensionCopy(sourceDir: string, userDataPath: string): {
export function ensureExtensionCopy(
sourceDir: string,
userDataPath: string,
): {
targetDir: string;
copied: boolean;
} {

View File

@@ -75,7 +75,10 @@ test('ensureExtensionCopy refreshes copied extension when display files change',
assert.equal(result.targetDir, targetDir);
assert.equal(result.copied, true);
assert.equal(
fs.readFileSync(path.join(targetDir, 'js', 'display', 'structured-content-generator.js'), 'utf8'),
fs.readFileSync(
path.join(targetDir, 'js', 'display', 'structured-content-generator.js'),
'utf8',
),
'new display code',
);
});

View File

@@ -29,7 +29,10 @@ export function getYomitanExtensionSearchPaths(
? path.resolve(options.moduleDir, '..', '..', '..', 'build', 'yomitan')
: null,
);
pushUnique(searchPaths, options.resourcesPath ? path.join(options.resourcesPath, 'yomitan') : null);
pushUnique(
searchPaths,
options.resourcesPath ? path.join(options.resourcesPath, 'yomitan') : null,
);
pushUnique(searchPaths, '/usr/share/SubMiner/yomitan');
pushUnique(searchPaths, options.userDataPath ? path.join(options.userDataPath, 'yomitan') : null);

View File

@@ -160,14 +160,10 @@ test('StructuredContentGenerator uses direct img loading for popup glossary imag
assert.ok(yomitanRoot, 'Run `bun run build:yomitan` before Yomitan integration tests.');
const { DisplayContentManager } = await import(
pathToFileURL(
path.join(yomitanRoot, 'js', 'display', 'display-content-manager.js'),
).href
pathToFileURL(path.join(yomitanRoot, 'js', 'display', 'display-content-manager.js')).href
);
const { StructuredContentGenerator } = await import(
pathToFileURL(
path.join(yomitanRoot, 'js', 'display', 'structured-content-generator.js'),
).href
pathToFileURL(path.join(yomitanRoot, 'js', 'display', 'structured-content-generator.js')).href
);
const createObjectURLCalls: string[] = [];
@@ -201,14 +197,10 @@ test('StructuredContentGenerator uses direct img loading for popup glossary imag
},
});
const generator = new StructuredContentGenerator(
manager,
new FakeDocument(),
{
devicePixelRatio: 1,
navigator: { userAgent: 'Mozilla/5.0' },
},
);
const generator = new StructuredContentGenerator(manager, new FakeDocument(), {
devicePixelRatio: 1,
navigator: { userAgent: 'Mozilla/5.0' },
});
const node = generator.createDefinitionImage(
{

View File

@@ -16,10 +16,7 @@ test('normalizeStartupArgv defaults no-arg startup to --start --background', ()
'--background',
]);
assert.deepEqual(
normalizeStartupArgv(
['SubMiner.AppImage', '--password-store', 'gnome-libsecret'],
{},
),
normalizeStartupArgv(['SubMiner.AppImage', '--password-store', 'gnome-libsecret'], {}),
['SubMiner.AppImage', '--password-store', 'gnome-libsecret', '--start', '--background'],
);
assert.deepEqual(normalizeStartupArgv(['SubMiner.AppImage', '--background'], {}), [

View File

@@ -1657,10 +1657,9 @@ const {
},
});
const maybeFocusExistingFirstRunSetupWindow =
createMaybeFocusExistingFirstRunSetupWindowHandler({
getSetupWindow: () => appState.firstRunSetupWindow,
});
const maybeFocusExistingFirstRunSetupWindow = createMaybeFocusExistingFirstRunSetupWindowHandler({
getSetupWindow: () => appState.firstRunSetupWindow,
});
const openFirstRunSetupWindowHandler = createOpenFirstRunSetupWindowHandler({
maybeFocusExistingSetupWindow: maybeFocusExistingFirstRunSetupWindow,
createSetupWindow: () =>
@@ -2404,9 +2403,9 @@ const { appReadyRuntimeRunner } = composeAppReadyRuntime({
shouldSkipHeavyStartup: () =>
Boolean(
appState.initialArgs &&
(shouldRunSettingsOnlyStartup(appState.initialArgs) ||
appState.initialArgs.dictionary ||
appState.initialArgs.setup),
(shouldRunSettingsOnlyStartup(appState.initialArgs) ||
appState.initialArgs.dictionary ||
appState.initialArgs.setup),
),
createImmersionTracker: () => {
ensureImmersionTrackerStarted();
@@ -2419,65 +2418,64 @@ const { appReadyRuntimeRunner } = composeAppReadyRuntime({
immersionTrackerStartupMainDeps,
});
const { runAndApplyStartupState } =
runtimeRegistry.startup.createStartupRuntimeHandlers<
CliArgs,
StartupState,
ReturnType<typeof createStartupBootstrapRuntimeDeps>
>({
appLifecycleRuntimeRunnerMainDeps: {
app,
platform: process.platform,
shouldStartApp: (nextArgs: CliArgs) => shouldStartApp(nextArgs),
parseArgs: (argv: string[]) => parseArgs(argv),
handleCliCommand: (nextArgs: CliArgs, source: CliCommandSource) =>
handleCliCommand(nextArgs, source),
printHelp: () => printHelp(DEFAULT_TEXTHOOKER_PORT),
logNoRunningInstance: () => appLogger.logNoRunningInstance(),
onReady: appReadyRuntimeRunner,
onWillQuitCleanup: () => onWillQuitCleanupHandler(),
shouldRestoreWindowsOnActivate: () => shouldRestoreWindowsOnActivateHandler(),
restoreWindowsOnActivate: () => restoreWindowsOnActivateHandler(),
shouldQuitOnWindowAllClosed: () => !appState.backgroundMode,
const { runAndApplyStartupState } = runtimeRegistry.startup.createStartupRuntimeHandlers<
CliArgs,
StartupState,
ReturnType<typeof createStartupBootstrapRuntimeDeps>
>({
appLifecycleRuntimeRunnerMainDeps: {
app,
platform: process.platform,
shouldStartApp: (nextArgs: CliArgs) => shouldStartApp(nextArgs),
parseArgs: (argv: string[]) => parseArgs(argv),
handleCliCommand: (nextArgs: CliArgs, source: CliCommandSource) =>
handleCliCommand(nextArgs, source),
printHelp: () => printHelp(DEFAULT_TEXTHOOKER_PORT),
logNoRunningInstance: () => appLogger.logNoRunningInstance(),
onReady: appReadyRuntimeRunner,
onWillQuitCleanup: () => onWillQuitCleanupHandler(),
shouldRestoreWindowsOnActivate: () => shouldRestoreWindowsOnActivateHandler(),
restoreWindowsOnActivate: () => restoreWindowsOnActivateHandler(),
shouldQuitOnWindowAllClosed: () => !appState.backgroundMode,
},
createAppLifecycleRuntimeRunner: (params) => createAppLifecycleRuntimeRunner(params),
buildStartupBootstrapMainDeps: (startAppLifecycle) => ({
argv: process.argv,
parseArgs: (argv: string[]) => parseArgs(argv),
setLogLevel: (level: string, source: LogLevelSource) => {
setLogLevel(level, source);
},
createAppLifecycleRuntimeRunner: (params) => createAppLifecycleRuntimeRunner(params),
buildStartupBootstrapMainDeps: (startAppLifecycle) => ({
argv: process.argv,
parseArgs: (argv: string[]) => parseArgs(argv),
setLogLevel: (level: string, source: LogLevelSource) => {
setLogLevel(level, source);
forceX11Backend: (args: CliArgs) => {
forceX11Backend(args);
},
enforceUnsupportedWaylandMode: (args: CliArgs) => {
enforceUnsupportedWaylandMode(args);
},
shouldStartApp: (args: CliArgs) => shouldStartApp(args),
getDefaultSocketPath: () => getDefaultSocketPath(),
defaultTexthookerPort: DEFAULT_TEXTHOOKER_PORT,
configDir: CONFIG_DIR,
defaultConfig: DEFAULT_CONFIG,
generateConfigTemplate: (config: ResolvedConfig) => generateConfigTemplate(config),
generateDefaultConfigFile: (
args: CliArgs,
options: {
configDir: string;
defaultConfig: unknown;
generateTemplate: (config: unknown) => string;
},
forceX11Backend: (args: CliArgs) => {
forceX11Backend(args);
},
enforceUnsupportedWaylandMode: (args: CliArgs) => {
enforceUnsupportedWaylandMode(args);
},
shouldStartApp: (args: CliArgs) => shouldStartApp(args),
getDefaultSocketPath: () => getDefaultSocketPath(),
defaultTexthookerPort: DEFAULT_TEXTHOOKER_PORT,
configDir: CONFIG_DIR,
defaultConfig: DEFAULT_CONFIG,
generateConfigTemplate: (config: ResolvedConfig) => generateConfigTemplate(config),
generateDefaultConfigFile: (
args: CliArgs,
options: {
configDir: string;
defaultConfig: unknown;
generateTemplate: (config: unknown) => string;
},
) => generateDefaultConfigFile(args, options),
setExitCode: (code) => {
process.exitCode = code;
},
quitApp: () => app.quit(),
logGenerateConfigError: (message) => logger.error(message),
startAppLifecycle,
}),
createStartupBootstrapRuntimeDeps: (deps) => createStartupBootstrapRuntimeDeps(deps),
runStartupBootstrapRuntime,
applyStartupState: (startupState) => applyStartupState(appState, startupState),
});
) => generateDefaultConfigFile(args, options),
setExitCode: (code) => {
process.exitCode = code;
},
quitApp: () => app.quit(),
logGenerateConfigError: (message) => logger.error(message),
startAppLifecycle,
}),
createStartupBootstrapRuntimeDeps: (deps) => createStartupBootstrapRuntimeDeps(deps),
runStartupBootstrapRuntime,
applyStartupState: (startupState) => applyStartupState(appState, startupState),
});
runAndApplyStartupState();
if (isAnilistTrackingEnabled(getResolvedConfig())) {
@@ -3203,6 +3201,7 @@ const { registerIpcRuntimeHandlers } = composeIpcRuntimeHandlers({
shiftSubtitleDelayToAdjacentCueHandler(direction),
sendMpvCommand: (rawCommand: (string | number)[]) =>
sendMpvCommandRuntime(appState.mpvClient, rawCommand),
getMpvClient: () => appState.mpvClient,
isMpvConnected: () => Boolean(appState.mpvClient && appState.mpvClient.connected),
hasRuntimeOptionsManager: () => appState.runtimeOptionsManager !== null,
},
@@ -3341,74 +3340,75 @@ const createCliCommandContextHandler = createCliCommandContextFactory({
});
const { createMainWindow: createMainWindowHandler, createModalWindow: createModalWindowHandler } =
createOverlayWindowRuntimeHandlers<BrowserWindow>({
createOverlayWindowDeps: {
createOverlayWindowCore: (kind, options) => createOverlayWindowCore(kind, options),
isDev,
ensureOverlayWindowLevel: (window) => ensureOverlayWindowLevel(window),
onRuntimeOptionsChanged: () => broadcastRuntimeOptionsChanged(),
setOverlayDebugVisualizationEnabled: (enabled) => setOverlayDebugVisualizationEnabled(enabled),
isOverlayVisible: (windowKind) =>
windowKind === 'visible' ? overlayManager.getVisibleOverlayVisible() : false,
tryHandleOverlayShortcutLocalFallback: (input) =>
overlayShortcutsRuntime.tryHandleOverlayShortcutLocalFallback(input),
onWindowClosed: (windowKind) => {
if (windowKind === 'visible') {
overlayManager.setMainWindow(null);
} else {
overlayManager.setModalWindow(null);
}
createOverlayWindowDeps: {
createOverlayWindowCore: (kind, options) => createOverlayWindowCore(kind, options),
isDev,
ensureOverlayWindowLevel: (window) => ensureOverlayWindowLevel(window),
onRuntimeOptionsChanged: () => broadcastRuntimeOptionsChanged(),
setOverlayDebugVisualizationEnabled: (enabled) =>
setOverlayDebugVisualizationEnabled(enabled),
isOverlayVisible: (windowKind) =>
windowKind === 'visible' ? overlayManager.getVisibleOverlayVisible() : false,
tryHandleOverlayShortcutLocalFallback: (input) =>
overlayShortcutsRuntime.tryHandleOverlayShortcutLocalFallback(input),
onWindowClosed: (windowKind) => {
if (windowKind === 'visible') {
overlayManager.setMainWindow(null);
} else {
overlayManager.setModalWindow(null);
}
},
},
},
setMainWindow: (window) => overlayManager.setMainWindow(window),
setModalWindow: (window) => overlayManager.setModalWindow(window),
});
setMainWindow: (window) => overlayManager.setMainWindow(window),
setModalWindow: (window) => overlayManager.setModalWindow(window),
});
const { ensureTray: ensureTrayHandler, destroyTray: destroyTrayHandler } =
createTrayRuntimeHandlers({
resolveTrayIconPathDeps: {
resolveTrayIconPathRuntime,
platform: process.platform,
resourcesPath: process.resourcesPath,
appPath: app.getAppPath(),
dirname: __dirname,
joinPath: (...parts) => path.join(...parts),
fileExists: (candidate) => fs.existsSync(candidate),
},
buildTrayMenuTemplateDeps: {
buildTrayMenuTemplateRuntime,
initializeOverlayRuntime: () => initializeOverlayRuntime(),
isOverlayRuntimeInitialized: () => appState.overlayRuntimeInitialized,
setVisibleOverlayVisible: (visible) => setVisibleOverlayVisible(visible),
showFirstRunSetup: () => !firstRunSetupService.isSetupCompleted(),
openFirstRunSetupWindow: () => openFirstRunSetupWindow(),
openYomitanSettings: () => openYomitanSettings(),
openRuntimeOptionsPalette: () => openRuntimeOptionsPalette(),
openJellyfinSetupWindow: () => openJellyfinSetupWindow(),
openAnilistSetupWindow: () => openAnilistSetupWindow(),
quitApp: () => app.quit(),
},
ensureTrayDeps: {
getTray: () => appTray,
setTray: (tray) => {
appTray = tray as Tray | null;
resolveTrayIconPathDeps: {
resolveTrayIconPathRuntime,
platform: process.platform,
resourcesPath: process.resourcesPath,
appPath: app.getAppPath(),
dirname: __dirname,
joinPath: (...parts) => path.join(...parts),
fileExists: (candidate) => fs.existsSync(candidate),
},
createImageFromPath: (iconPath) => nativeImage.createFromPath(iconPath),
createEmptyImage: () => nativeImage.createEmpty(),
createTray: (icon) => new Tray(icon as ConstructorParameters<typeof Tray>[0]),
trayTooltip: TRAY_TOOLTIP,
platform: process.platform,
logWarn: (message) => logger.warn(message),
initializeOverlayRuntime: () => initializeOverlayRuntime(),
isOverlayRuntimeInitialized: () => appState.overlayRuntimeInitialized,
setVisibleOverlayVisible: (visible) => setVisibleOverlayVisible(visible),
},
destroyTrayDeps: {
getTray: () => appTray,
setTray: (tray) => {
appTray = tray as Tray | null;
buildTrayMenuTemplateDeps: {
buildTrayMenuTemplateRuntime,
initializeOverlayRuntime: () => initializeOverlayRuntime(),
isOverlayRuntimeInitialized: () => appState.overlayRuntimeInitialized,
setVisibleOverlayVisible: (visible) => setVisibleOverlayVisible(visible),
showFirstRunSetup: () => !firstRunSetupService.isSetupCompleted(),
openFirstRunSetupWindow: () => openFirstRunSetupWindow(),
openYomitanSettings: () => openYomitanSettings(),
openRuntimeOptionsPalette: () => openRuntimeOptionsPalette(),
openJellyfinSetupWindow: () => openJellyfinSetupWindow(),
openAnilistSetupWindow: () => openAnilistSetupWindow(),
quitApp: () => app.quit(),
},
},
buildMenuFromTemplate: (template) => Menu.buildFromTemplate(template),
});
ensureTrayDeps: {
getTray: () => appTray,
setTray: (tray) => {
appTray = tray as Tray | null;
},
createImageFromPath: (iconPath) => nativeImage.createFromPath(iconPath),
createEmptyImage: () => nativeImage.createEmpty(),
createTray: (icon) => new Tray(icon as ConstructorParameters<typeof Tray>[0]),
trayTooltip: TRAY_TOOLTIP,
platform: process.platform,
logWarn: (message) => logger.warn(message),
initializeOverlayRuntime: () => initializeOverlayRuntime(),
isOverlayRuntimeInitialized: () => appState.overlayRuntimeInitialized,
setVisibleOverlayVisible: (visible) => setVisibleOverlayVisible(visible),
},
destroyTrayDeps: {
getTray: () => appTray,
setTray: (tray) => {
appTray = tray as Tray | null;
},
},
buildMenuFromTemplate: (template) => Menu.buildFromTemplate(template),
});
const yomitanExtensionRuntime = createYomitanExtensionRuntime({
loadYomitanExtensionCore,
userDataPath: USER_DATA_PATH,

View File

@@ -563,7 +563,9 @@ test('generateForCurrentMedia reapplies collapsible open states when using cache
content: { content: Array<Record<string, unknown>> };
}
).content.content;
const sections = children.filter((item) => (item as { tag?: string }).tag === 'details') as Array<{
const sections = children.filter(
(item) => (item as { tag?: string }).tag === 'details',
) as Array<{
open?: boolean;
}>;
assert.ok(sections.length >= 2);
@@ -1889,7 +1891,9 @@ test('buildMergedDictionary reapplies collapsible open states from current confi
content: { content: Array<Record<string, unknown>> };
}
).content.content;
const sections = children.filter((item) => (item as { tag?: string }).tag === 'details') as Array<{
const sections = children.filter(
(item) => (item as { tag?: string }).tag === 'details',
) as Array<{
open?: boolean;
}>;
assert.ok(sections.length >= 1);

View File

@@ -502,7 +502,10 @@ function expandRawNameVariants(rawName: string): string[] {
if (!trimmed) return [];
const variants = new Set<string>([trimmed]);
const outer = trimmed.replace(/[(][^()]+[)]/g, ' ').replace(/\s+/g, ' ').trim();
const outer = trimmed
.replace(/[(][^()]+[)]/g, ' ')
.replace(/\s+/g, ' ')
.trim();
if (outer && outer !== trimmed) {
variants.add(outer);
}
@@ -1286,12 +1289,14 @@ async function fetchCharactersForMedia(
if (!node || typeof node.id !== 'number') continue;
const fullName = node.name?.full?.trim() || '';
const nativeName = node.name?.native?.trim() || '';
const alternativeNames = [...new Set(
(node.name?.alternative ?? [])
.filter((value): value is string => typeof value === 'string')
.map((value) => value.trim())
.filter((value) => value.length > 0),
)];
const alternativeNames = [
...new Set(
(node.name?.alternative ?? [])
.filter((value): value is string => typeof value === 'string')
.map((value) => value.trim())
.filter((value) => value.length > 0),
),
];
if (!fullName && !nativeName && alternativeNames.length === 0) continue;
const voiceActors: VoiceActorRecord[] = [];
for (const va of edge?.voiceActors ?? []) {

View File

@@ -186,6 +186,7 @@ export interface MpvCommandRuntimeServiceDepsParams {
mpvPlayNextSubtitle: HandleMpvCommandFromIpcOptions['mpvPlayNextSubtitle'];
shiftSubDelayToAdjacentSubtitle: HandleMpvCommandFromIpcOptions['shiftSubDelayToAdjacentSubtitle'];
mpvSendCommand: HandleMpvCommandFromIpcOptions['mpvSendCommand'];
resolveProxyCommandOsd?: HandleMpvCommandFromIpcOptions['resolveProxyCommandOsd'];
isMpvConnected: HandleMpvCommandFromIpcOptions['isMpvConnected'];
hasRuntimeOptionsManager: HandleMpvCommandFromIpcOptions['hasRuntimeOptionsManager'];
}
@@ -339,6 +340,7 @@ export function createMpvCommandRuntimeServiceDeps(
mpvPlayNextSubtitle: params.mpvPlayNextSubtitle,
shiftSubDelayToAdjacentSubtitle: params.shiftSubDelayToAdjacentSubtitle,
mpvSendCommand: params.mpvSendCommand,
resolveProxyCommandOsd: params.resolveProxyCommandOsd,
isMpvConnected: params.isMpvConnected,
hasRuntimeOptionsManager: params.hasRuntimeOptionsManager,
};

View File

@@ -2,6 +2,12 @@ import type { RuntimeOptionApplyResult, RuntimeOptionId } from '../types';
import { handleMpvCommandFromIpc } from '../core/services';
import { createMpvCommandRuntimeServiceDeps } from './dependencies';
import { SPECIAL_COMMANDS } from '../config';
import { resolveProxyCommandOsdRuntime } from './runtime/mpv-proxy-osd';
type MpvPropertyClientLike = {
connected: boolean;
requestProperty: (name: string) => Promise<unknown>;
};
export interface MpvCommandFromIpcRuntimeDeps {
triggerSubsyncFromConfig: () => void;
@@ -12,6 +18,7 @@ export interface MpvCommandFromIpcRuntimeDeps {
playNextSubtitle: () => void;
shiftSubDelayToAdjacentSubtitle: (direction: 'next' | 'previous') => Promise<void>;
sendMpvCommand: (command: (string | number)[]) => void;
getMpvClient: () => MpvPropertyClientLike | null;
isMpvConnected: () => boolean;
hasRuntimeOptionsManager: () => boolean;
}
@@ -33,6 +40,8 @@ export function handleMpvCommandFromIpcRuntime(
shiftSubDelayToAdjacentSubtitle: (direction) =>
deps.shiftSubDelayToAdjacentSubtitle(direction),
mpvSendCommand: deps.sendMpvCommand,
resolveProxyCommandOsd: (nextCommand) =>
resolveProxyCommandOsdRuntime(nextCommand, deps.getMpvClient),
isMpvConnected: deps.isMpvConnected,
hasRuntimeOptionsManager: deps.hasRuntimeOptionsManager,
}),

View File

@@ -75,5 +75,7 @@ test('createRegisterSubminerProtocolClientHandler keeps unsupported registration
});
register();
assert.deepEqual(calls, ['debug:Failed to register default protocol handler for subminer:// URLs']);
assert.deepEqual(calls, [
'debug:Failed to register default protocol handler for subminer:// URLs',
]);
});

View File

@@ -172,7 +172,10 @@ export function createCharacterDictionaryAutoSyncRuntimeService(
? String(existing.revision)
: null;
const shouldImport =
merged !== null || existing === null || existingRevision === null || existingRevision !== revision;
merged !== null ||
existing === null ||
existingRevision === null ||
existingRevision !== revision;
if (shouldImport) {
if (existing !== null) {

View File

@@ -16,6 +16,7 @@ test('composeIpcRuntimeHandlers returns callable IPC handlers and registration b
playNextSubtitle: () => {},
shiftSubDelayToAdjacentSubtitle: async () => {},
sendMpvCommand: () => {},
getMpvClient: () => null,
isMpvConnected: () => false,
hasRuntimeOptionsManager: () => true,
},

View File

@@ -79,7 +79,10 @@ test('installFirstRunPluginToDefaultLocation installs plugin and backs up existi
const scriptsDirEntries = fs.readdirSync(installPaths.scriptsDir);
const scriptOptsEntries = fs.readdirSync(installPaths.scriptOptsDir);
assert.equal(scriptsDirEntries.some((entry) => entry.startsWith('subminer.bak.')), true);
assert.equal(
scriptsDirEntries.some((entry) => entry.startsWith('subminer.bak.')),
true,
);
assert.equal(
scriptOptsEntries.some((entry) => entry.startsWith('subminer.conf.bak.')),
true,

View File

@@ -3,10 +3,7 @@ import assert from 'node:assert/strict';
import fs from 'node:fs';
import os from 'node:os';
import path from 'node:path';
import {
createFirstRunSetupService,
shouldAutoOpenFirstRunSetup,
} from './first-run-setup-service';
import { createFirstRunSetupService, shouldAutoOpenFirstRunSetup } from './first-run-setup-service';
import type { CliArgs } from '../../cli/args';
function withTempDir(fn: (dir: string) => Promise<void> | void): Promise<void> | void {

View File

@@ -43,39 +43,39 @@ export interface FirstRunSetupService {
function hasAnyStartupCommandBeyondSetup(args: CliArgs): boolean {
return Boolean(
args.toggle ||
args.toggleVisibleOverlay ||
args.settings ||
args.show ||
args.hide ||
args.showVisibleOverlay ||
args.hideVisibleOverlay ||
args.copySubtitle ||
args.copySubtitleMultiple ||
args.mineSentence ||
args.mineSentenceMultiple ||
args.updateLastCardFromClipboard ||
args.refreshKnownWords ||
args.toggleSecondarySub ||
args.triggerFieldGrouping ||
args.triggerSubsync ||
args.markAudioCard ||
args.openRuntimeOptions ||
args.anilistStatus ||
args.anilistLogout ||
args.anilistSetup ||
args.anilistRetryQueue ||
args.dictionary ||
args.jellyfin ||
args.jellyfinLogin ||
args.jellyfinLogout ||
args.jellyfinLibraries ||
args.jellyfinItems ||
args.jellyfinSubtitles ||
args.jellyfinPlay ||
args.jellyfinRemoteAnnounce ||
args.jellyfinPreviewAuth ||
args.texthooker ||
args.help
args.toggleVisibleOverlay ||
args.settings ||
args.show ||
args.hide ||
args.showVisibleOverlay ||
args.hideVisibleOverlay ||
args.copySubtitle ||
args.copySubtitleMultiple ||
args.mineSentence ||
args.mineSentenceMultiple ||
args.updateLastCardFromClipboard ||
args.refreshKnownWords ||
args.toggleSecondarySub ||
args.triggerFieldGrouping ||
args.triggerSubsync ||
args.markAudioCard ||
args.openRuntimeOptions ||
args.anilistStatus ||
args.anilistLogout ||
args.anilistSetup ||
args.anilistRetryQueue ||
args.dictionary ||
args.jellyfin ||
args.jellyfinLogin ||
args.jellyfinLogout ||
args.jellyfinLibraries ||
args.jellyfinItems ||
args.jellyfinSubtitles ||
args.jellyfinPlay ||
args.jellyfinRemoteAnnounce ||
args.jellyfinPreviewAuth ||
args.texthooker ||
args.help,
);
}
@@ -85,7 +85,10 @@ export function shouldAutoOpenFirstRunSetup(args: CliArgs): boolean {
return !hasAnyStartupCommandBeyondSetup(args);
}
function getPluginStatus(state: SetupState, pluginInstalled: boolean): SetupStatusSnapshot['pluginStatus'] {
function getPluginStatus(
state: SetupState,
pluginInstalled: boolean,
): SetupStatusSnapshot['pluginStatus'] {
if (pluginInstalled) return 'installed';
if (state.pluginInstallStatus === 'skipped') return 'skipped';
if (state.pluginInstallStatus === 'failed') return 'failed';

View File

@@ -253,7 +253,9 @@ export function createHandleFirstRunSetupNavigationHandler(deps: {
};
}
export function createOpenFirstRunSetupWindowHandler<TWindow extends FirstRunSetupWindowLike>(deps: {
export function createOpenFirstRunSetupWindowHandler<
TWindow extends FirstRunSetupWindowLike,
>(deps: {
maybeFocusExistingSetupWindow: () => boolean;
createSetupWindow: () => TWindow;
getSetupSnapshot: () => Promise<FirstRunSetupHtmlModel>;
@@ -279,9 +281,7 @@ export function createOpenFirstRunSetupWindowHandler<TWindow extends FirstRunSet
const render = async (): Promise<void> => {
const model = await deps.getSetupSnapshot();
const html = deps.buildSetupHtml(model);
await setupWindow.loadURL(
`data:text/html;charset=utf-8,${deps.encodeURIComponent(html)}`,
);
await setupWindow.loadURL(`data:text/html;charset=utf-8,${deps.encodeURIComponent(html)}`);
};
const handleNavigation = createHandleFirstRunSetupNavigationHandler({

View File

@@ -19,6 +19,7 @@ test('ipc bridge action main deps builders map callbacks', async () => {
playNextSubtitle: () => {},
shiftSubDelayToAdjacentSubtitle: async () => {},
sendMpvCommand: () => {},
getMpvClient: () => null,
isMpvConnected: () => true,
hasRuntimeOptionsManager: () => true,
}),

View File

@@ -16,6 +16,7 @@ test('handle mpv command handler forwards command and built deps', () => {
playNextSubtitle: () => {},
shiftSubDelayToAdjacentSubtitle: async () => {},
sendMpvCommand: () => {},
getMpvClient: () => null,
isMpvConnected: () => true,
hasRuntimeOptionsManager: () => true,
};

View File

@@ -15,6 +15,7 @@ test('ipc mpv command main deps builder maps callbacks', () => {
calls.push(`shift:${direction}`);
},
sendMpvCommand: (command) => calls.push(`cmd:${command.join(':')}`),
getMpvClient: () => ({ connected: true, requestProperty: async () => null }),
isMpvConnected: () => true,
hasRuntimeOptionsManager: () => false,
})();
@@ -27,6 +28,7 @@ test('ipc mpv command main deps builder maps callbacks', () => {
deps.playNextSubtitle();
void deps.shiftSubDelayToAdjacentSubtitle('next');
deps.sendMpvCommand(['show-text', 'ok']);
assert.equal(typeof deps.getMpvClient()?.requestProperty, 'function');
assert.equal(deps.isMpvConnected(), true);
assert.equal(deps.hasRuntimeOptionsManager(), false);
assert.deepEqual(calls, [

View File

@@ -12,6 +12,7 @@ export function createBuildMpvCommandFromIpcRuntimeMainDepsHandler(
playNextSubtitle: () => deps.playNextSubtitle(),
shiftSubDelayToAdjacentSubtitle: (direction) => deps.shiftSubDelayToAdjacentSubtitle(direction),
sendMpvCommand: (command: (string | number)[]) => deps.sendMpvCommand(command),
getMpvClient: () => deps.getMpvClient(),
isMpvConnected: () => deps.isMpvConnected(),
hasRuntimeOptionsManager: () => deps.hasRuntimeOptionsManager(),
});

View File

@@ -0,0 +1,33 @@
import assert from 'node:assert/strict';
import test from 'node:test';
import { resolveProxyCommandOsdRuntime } from './mpv-proxy-osd';
// Builds a fake mpv property client for these tests: always connected, and
// answering the three properties the OSD resolver reads (sid, secondary-sid,
// track-list). Unknown properties resolve to null, mirroring a missing value.
function createClient() {
  return {
    connected: true,
    requestProperty: async (name: string) => {
      switch (name) {
        case 'sid':
          return 3;
        case 'secondary-sid':
          return 8;
        case 'track-list':
          // Fresh array per request, like a real IPC round-trip would yield.
          return [
            { id: 3, type: 'sub', title: 'Japanese', selected: true, external: false },
            { id: 8, type: 'sub', title: 'English Commentary', external: true },
          ];
        default:
          return null;
      }
    },
  };
}
// Regression coverage for the J/Shift+J subtitle-cycle OSD labels: the runtime
// must resolve the label from the client's current `sid` and `track-list`
// values, not from the raw command arguments.
test('resolveProxyCommandOsdRuntime formats the active primary subtitle track label', async () => {
  const result = await resolveProxyCommandOsdRuntime(['cycle', 'sid'], () => createClient());
  assert.equal(result, 'Subtitle track: Internal #3 - Japanese (active)');
});
// The 'auto' argument in the command is ignored for labeling: the OSD text is
// derived from the client-reported secondary-sid (8 -> external track).
test('resolveProxyCommandOsdRuntime formats the active secondary subtitle track label', async () => {
  const result = await resolveProxyCommandOsdRuntime(
    ['set_property', 'secondary-sid', 'auto'],
    () => createClient(),
  );
  assert.equal(result, 'Secondary subtitle track: External #8 - English Commentary');
});

View File

@@ -0,0 +1,100 @@
// Minimal surface of the mpv IPC client that the OSD resolver needs:
// a connected flag plus an async property read returning the raw IPC value.
type MpvPropertyClientLike = {
  connected: boolean;
  requestProperty: (name: string) => Promise<unknown>;
};
// Subset of an mpv `track-list` entry used for building subtitle OSD labels.
// All fields are optional because the raw IPC payload is untyped.
type MpvSubtitleTrack = {
  id?: number;
  type?: string;
  selected?: boolean;
  external?: boolean;
  lang?: string;
  title?: string;
};
// Maps a proxied mpv command to an OSD label prefix, or null when the command
// does not modify a subtitle-track property. Only property-modifying
// operations on `sid` / `secondary-sid` produce a prefix.
function getTrackOsdPrefix(command: (string | number)[]): string | null {
  const [rawOperation, rawProperty] = command;
  const operation = typeof rawOperation === 'string' ? rawOperation : '';
  const property = typeof rawProperty === 'string' ? rawProperty : '';
  const propertyModifyingOperations = new Set([
    'add',
    'set',
    'set_property',
    'cycle',
    'cycle-values',
    'multiply',
  ]);
  if (!propertyModifyingOperations.has(operation)) {
    return null;
  }
  switch (property) {
    case 'sid':
      return 'Subtitle track';
    case 'secondary-sid':
      return 'Secondary subtitle track';
    default:
      return null;
  }
}
// Coerces a raw mpv track-id value into an integer id, or null for anything
// that does not name a concrete track ('no', 'auto', blanks, non-integers).
function parseTrackId(value: unknown): number | null {
  if (typeof value === 'number') {
    return Number.isInteger(value) ? value : null;
  }
  if (typeof value !== 'string') {
    return null;
  }
  const normalized = value.trim();
  // mpv uses 'no' / 'auto' (and an empty value) to mean "no explicit track".
  if (normalized === '' || normalized === 'no' || normalized === 'auto') {
    return null;
  }
  const numeric = Number(normalized);
  return Number.isInteger(numeric) ? numeric : null;
}
// Converts the raw `track-list` IPC payload into typed entries, normalizing
// each entry's id through parseTrackId (non-track ids become undefined).
// Non-array payloads and non-object entries are dropped.
function normalizeTrackList(trackListRaw: unknown): MpvSubtitleTrack[] {
  if (!Array.isArray(trackListRaw)) {
    return [];
  }
  const normalized: MpvSubtitleTrack[] = [];
  for (const entry of trackListRaw) {
    if (!entry || typeof entry !== 'object') {
      continue;
    }
    const record = entry as Record<string, unknown>;
    const id = parseTrackId(record.id);
    normalized.push({
      ...record,
      id: id === null ? undefined : id,
    } as MpvSubtitleTrack);
  }
  return normalized;
}
// Renders a human-readable OSD label for one subtitle track, e.g.
// "Internal #3 - Japanese (active)". Falls back to id -1 / "unknown" when the
// track entry lacks those fields.
function formatSubtitleTrackLabel(track: MpvSubtitleTrack): string {
  const origin = track.external ? 'External' : 'Internal';
  const displayId = typeof track.id === 'number' ? track.id : -1;
  const name = track.lang || track.title || 'unknown';
  const activeSuffix = track.selected ? ' (active)' : '';
  return `${origin} #${displayId} - ${name}${activeSuffix}`;
}
/**
 * Resolves an OSD message describing the subtitle track affected by a proxied
 * mpv command, or null when no message should be shown.
 *
 * Returns null when the command does not modify `sid` / `secondary-sid`, when
 * no connected client is available, or when the property reads fail. Returns
 * "<prefix>: none" when the property reports no concrete track, and
 * "<prefix>: #<id>" when the id is not present in `track-list`.
 *
 * @param command Raw mpv command, e.g. ['cycle', 'sid'].
 * @param getMpvClient Accessor for the current IPC client (may return null).
 */
export async function resolveProxyCommandOsdRuntime(
  command: (string | number)[],
  getMpvClient: () => MpvPropertyClientLike | null,
): Promise<string | null> {
  const prefix = getTrackOsdPrefix(command);
  if (!prefix) return null;
  const client = getMpvClient();
  if (!client?.connected) return null;
  const property = prefix === 'Subtitle track' ? 'sid' : 'secondary-sid';
  let trackListRaw: unknown;
  let trackIdRaw: unknown;
  try {
    // Read the current track selection and track list in parallel.
    [trackListRaw, trackIdRaw] = await Promise.all([
      client.requestProperty('track-list'),
      client.requestProperty(property),
    ]);
  } catch {
    // OSD text is best-effort: a failed property read (e.g. mpv disconnecting
    // mid-request) must not reject the surrounding command pipeline.
    return null;
  }
  const trackId = parseTrackId(trackIdRaw);
  if (trackId === null) {
    return `${prefix}: none`;
  }
  const track = normalizeTrackList(trackListRaw).find(
    (entry) => entry.type === 'sub' && entry.id === trackId,
  );
  if (!track) {
    // The selected id is not in track-list (e.g. stale state); show the raw id.
    return `${prefix}: #${trackId}`;
  }
  return `${prefix}: ${formatSubtitleTrackLabel(track)}`;
}

View File

@@ -516,11 +516,11 @@ body.settings-modal-open #subtitleContainer {
}
#subtitleRoot
.word:not(.word-known):not(.word-n-plus-one):not(.word-name-match):not(.word-frequency-single):not(
.word-frequency-band-1
):not(.word-frequency-band-2):not(.word-frequency-band-3):not(.word-frequency-band-4):not(
.word-frequency-band-5
):hover {
.word:not(.word-known):not(.word-n-plus-one):not(.word-name-match):not(
.word-frequency-single
):not(.word-frequency-band-1):not(.word-frequency-band-2):not(.word-frequency-band-3):not(
.word-frequency-band-4
):not(.word-frequency-band-5):hover {
background: var(--subtitle-hover-token-background-color, rgba(54, 58, 79, 0.84));
border-radius: 3px;
color: var(--subtitle-hover-token-color, #f4dbd6) !important;
@@ -558,9 +558,11 @@ body.settings-modal-open #subtitleContainer {
#subtitleRoot
.word:is(.word-jlpt-n1, .word-jlpt-n2, .word-jlpt-n3, .word-jlpt-n4, .word-jlpt-n5):not(
.word-known
):not(.word-n-plus-one):not(.word-name-match):not(.word-frequency-single):not(.word-frequency-band-1):not(
.word-frequency-band-2
):not(.word-frequency-band-3):not(.word-frequency-band-4):not(.word-frequency-band-5):hover {
):not(.word-n-plus-one):not(.word-name-match):not(.word-frequency-single):not(
.word-frequency-band-1
):not(.word-frequency-band-2):not(.word-frequency-band-3):not(.word-frequency-band-4):not(
.word-frequency-band-5
):hover {
color: var(--subtitle-hover-token-color, #f4dbd6) !important;
-webkit-text-fill-color: var(--subtitle-hover-token-color, #f4dbd6) !important;
}
@@ -636,15 +638,19 @@ body.settings-modal-open #subtitleContainer {
#subtitleRoot
.word:is(.word-jlpt-n1, .word-jlpt-n2, .word-jlpt-n3, .word-jlpt-n4, .word-jlpt-n5):not(
.word-known
):not(.word-n-plus-one):not(.word-name-match):not(.word-frequency-single):not(.word-frequency-band-1):not(
.word-frequency-band-2
):not(.word-frequency-band-3):not(.word-frequency-band-4):not(.word-frequency-band-5)::selection,
):not(.word-n-plus-one):not(.word-name-match):not(.word-frequency-single):not(
.word-frequency-band-1
):not(.word-frequency-band-2):not(.word-frequency-band-3):not(.word-frequency-band-4):not(
.word-frequency-band-5
)::selection,
#subtitleRoot
.word:is(.word-jlpt-n1, .word-jlpt-n2, .word-jlpt-n3, .word-jlpt-n4, .word-jlpt-n5):not(
.word-known
):not(.word-n-plus-one):not(.word-name-match):not(.word-frequency-single):not(.word-frequency-band-1):not(
.word-frequency-band-2
):not(.word-frequency-band-3):not(.word-frequency-band-4):not(.word-frequency-band-5)
):not(.word-n-plus-one):not(.word-name-match):not(.word-frequency-single):not(
.word-frequency-band-1
):not(.word-frequency-band-2):not(.word-frequency-band-3):not(.word-frequency-band-4):not(
.word-frequency-band-5
)
.c::selection {
color: var(--subtitle-hover-token-color, #f4dbd6) !important;
-webkit-text-fill-color: var(--subtitle-hover-token-color, #f4dbd6) !important;

View File

@@ -114,7 +114,8 @@ function installFakeDocument() {
function collectWordNodes(root: FakeElement): FakeElement[] {
return root.childNodes.filter(
(child): child is FakeElement => child instanceof FakeElement && child.className.includes('word'),
(child): child is FakeElement =>
child instanceof FakeElement && child.className.includes('word'),
);
}

View File

@@ -265,10 +265,7 @@ function renderWithTokens(
span.dataset.tokenIndex = String(segment.tokenIndex);
if (token.reading) span.dataset.reading = token.reading;
if (token.headword) span.dataset.headword = token.headword;
const frequencyRankLabel = getFrequencyRankLabelForToken(
token,
resolvedTokenRenderSettings,
);
const frequencyRankLabel = getFrequencyRankLabelForToken(token, resolvedTokenRenderSettings);
if (frequencyRankLabel) {
span.dataset.frequencyRank = frequencyRankLabel;
}
@@ -304,10 +301,7 @@ function renderWithTokens(
span.dataset.tokenIndex = String(index);
if (token.reading) span.dataset.reading = token.reading;
if (token.headword) span.dataset.headword = token.headword;
const frequencyRankLabel = getFrequencyRankLabelForToken(
token,
resolvedTokenRenderSettings,
);
const frequencyRankLabel = getFrequencyRankLabelForToken(token, resolvedTokenRenderSettings);
if (frequencyRankLabel) {
span.dataset.frequencyRank = frequencyRankLabel;
}
@@ -413,10 +407,7 @@ export function computeWordClass(
tokenRenderSettings?.bandedColors,
DEFAULT_FREQUENCY_RENDER_SETTINGS.bandedColors,
),
topX: sanitizeFrequencyTopX(
tokenRenderSettings?.topX,
DEFAULT_FREQUENCY_RENDER_SETTINGS.topX,
),
topX: sanitizeFrequencyTopX(tokenRenderSettings?.topX, DEFAULT_FREQUENCY_RENDER_SETTINGS.topX),
singleColor: sanitizeHexColor(
tokenRenderSettings?.singleColor,
DEFAULT_FREQUENCY_RENDER_SETTINGS.singleColor,

View File

@@ -43,7 +43,10 @@ test('ensureDefaultConfigBootstrap creates config dir and default jsonc only whe
});
assert.equal(fs.existsSync(configDir), true);
assert.equal(fs.readFileSync(path.join(configDir, 'config.jsonc'), 'utf8'), '{\n "logging": {}\n}\n');
assert.equal(
fs.readFileSync(path.join(configDir, 'config.jsonc'), 'utf8'),
'{\n "logging": {}\n}\n',
);
fs.writeFileSync(path.join(configDir, 'config.json'), '{"keep":true}\n');
fs.rmSync(path.join(configDir, 'config.jsonc'));

View File

@@ -162,7 +162,10 @@ export function ensureDefaultConfigBootstrap(options: {
const writeFileSync = options.writeFileSync ?? fs.writeFileSync;
mkdirSync(options.configDir, { recursive: true });
if (existsSync(options.configFilePaths.jsoncPath) || existsSync(options.configFilePaths.jsonPath)) {
if (
existsSync(options.configFilePaths.jsoncPath) ||
existsSync(options.configFilePaths.jsonPath)
) {
return;
}
@@ -178,7 +181,7 @@ export function resolveDefaultMpvInstallPaths(
platform === 'darwin'
? path.join(homeDir, 'Library', 'Application Support', 'mpv')
: platform === 'linux'
? path.join((xdgConfigHome?.trim() || path.join(homeDir, '.config')), 'mpv')
? path.join(xdgConfigHome?.trim() || path.join(homeDir, '.config'), 'mpv')
: path.join(homeDir, 'AppData', 'Roaming', 'mpv');
return {