Compare commits

...

11 Commits

Author SHA1 Message Date
94abd0f372 Deduplicate voice actor image downloads per AniList person ID
- Avoid repeated downloads when multiple characters share the same voice actor
- Add coverage for shared voice actor image download behavior
2026-03-06 17:21:19 -08:00
4d60f64bea Gate subtitle name highlighting on character dictionary setting
- Disable `getNameMatchEnabled` when `anilist.characterDictionary.enabled` is false
- Wire character-dictionary enablement into main subtitle tokenization deps
- Add runtime deps test coverage and record task/plan docs
2026-03-06 17:13:56 -08:00
dbd6803623 chore: update acknowledgments and fix config quote style 2026-03-06 16:42:42 -08:00
5ff4cc21bd feat(dictionary): structured glossary with voice actors and collapsible sections
- Add voice actor data (Japanese VA) from AniList GraphQL query
- Redesign glossary entries as structured-content with role badges,
  collapsible Description/Character Information/Voiced-by sections
- Parse __Key:__ fields from AniList descriptions into structured info
- Download and embed voice actor images alongside character images
- Bump format version to 12
2026-03-06 16:41:58 -08:00
82bec02a36 feat(subtitles): highlight character-name tokens 2026-03-06 16:38:19 -08:00
c548044c61 docs(backlog): close TASK-87.4 2026-03-06 11:05:47 -08:00
39976c03f9 refactor: remove dead registry and pipeline modules 2026-03-06 11:04:17 -08:00
e659b5d8f4 refactor(runtime): remove dead symbols from composition roots 2026-03-06 10:56:20 -08:00
85bd6c6ec2 refactor: extract anki integration runtime 2026-03-06 09:30:15 -08:00
6fe6976dc9 test: relax launcher smoke timeout budget 2026-03-06 07:52:26 -08:00
e6150e9513 fix: update default frequency band colors 2026-03-06 07:51:58 -08:00
58 changed files with 2044 additions and 841 deletions

View File

@@ -1,3 +1,4 @@
<!-- BACKLOG.MD MCP GUIDELINES START -->
<CRITICAL_INSTRUCTION>
@@ -16,7 +17,6 @@ This project uses Backlog.md MCP for all task and project management activities.
- **When to read it**: BEFORE creating tasks, or when you're unsure whether to track work
These guides cover:
- Decision framework for when to create tasks
- Search-first workflow to avoid duplicates
- Links to detailed guides for task creation, execution, and finalization

View File

@@ -109,7 +109,7 @@ The Bun-managed discovery lanes intentionally exclude a small set of suites that
## Acknowledgments
Built on the shoulders of [GameSentenceMiner](https://github.com/bpwhelan/GameSentenceMiner), [texthooker-ui](https://github.com/Renji-XD/texthooker-ui), [mpvacious](https://github.com/Ajatt-Tools/mpvacious), [Anacreon-Script](https://github.com/friedrich-de/Anacreon-Script), and [autosubsync-mpv](https://github.com/joaquintorres/autosubsync-mpv). Subtitles powered by [Jimaku.cc](https://jimaku.cc). Dictionary lookups via [Yomitan](https://github.com/yomidevs/yomitan).
Built on the shoulders of [GameSentenceMiner](https://github.com/bpwhelan/GameSentenceMiner), [Renji's Texthooker Page](https://github.com/Renji-XD/texthooker-ui), [mpvacious](https://github.com/Ajatt-Tools/mpvacious), [Anacreon-Script](https://github.com/friedrich-de/Anacreon-Script), and [Bee's Character Dictionary](https://github.com/bee-san/Japanese_Character_Name_Dictionary). Subtitles powered by [Jimaku.cc](https://jimaku.cc). Dictionary lookups via [Yomitan](https://github.com/yomidevs/yomitan).
## License

View File

@@ -1,11 +1,11 @@
project_name: 'SubMiner'
default_status: 'To Do'
statuses: ['To Do', 'In Progress', 'Done']
project_name: "SubMiner"
default_status: "To Do"
statuses: ["To Do", "In Progress", "Done"]
labels: []
definition_of_done: []
date_format: yyyy-mm-dd
max_column_width: 20
default_editor: 'nvim'
default_editor: "nvim"
auto_open_browser: false
default_port: 6420
remote_operations: true
@@ -13,4 +13,4 @@ auto_commit: false
bypass_git_hooks: false
check_active_branches: true
active_branch_days: 30
task_prefix: 'task'
task_prefix: "task"

View File

@@ -3,10 +3,10 @@ id: TASK-87
title: >-
Codebase health: harden verification and retire dead architecture identified
in the March 2026 review
status: To Do
status: In Progress
assignee: []
created_date: '2026-03-06 03:19'
updated_date: '2026-03-06 03:20'
updated_date: '2026-03-06 11:11'
labels:
- tech-debt
- tests
@@ -19,9 +19,10 @@ references:
- src/main.ts
- src/anki-integration.ts
- src/core/services/immersion-tracker-service.test.ts
- src/translators/index.ts
- src/subsync/engines.ts
- src/subtitle/pipeline.ts
- src/translators/index.ts
- src/subsync/engines.ts
- src/subtitle/pipeline.ts
- backlog/tasks/task-87.5 - Dead-architecture-cleanup-delete-unused-registry-and-pipeline-modules-that-are-off-the-live-path.md
documentation:
- docs/reports/2026-02-22-task-100-dead-code-report.md
priority: high
@@ -69,3 +70,10 @@ Shared review context to restate in child tasks:
- src/main.ts trips many noUnusedLocals/noUnusedParameters diagnostics.
- src/translators/index.ts, src/subsync/engines.ts, src/subtitle/pipeline.ts, src/tokenizers/index.ts, and src/token-mergers/index.ts appeared unreferenced during review and must be re-verified before deletion.
<!-- SECTION:PLAN:END -->
## Progress Notes
- `TASK-87.5` is complete. The isolated dead registry/pipeline modules were re-verified as off the maintained runtime path and removed.
- Live subtitle tokenization now owns the zero-width separator normalization that previously only existed in the dead subtitle pipeline path, so the cleanup did not drop that behavior.
- Verification completed for the cleanup slice with `bun test src/core/services/tokenizer.test.ts`, `bun test src/dead-architecture-cleanup.test.ts`, `bun test src/core/services/subsync.test.ts src/subsync/utils.test.ts`, `bun run tsc`, and `bun run test:src`.
- Remaining parent-task scope still includes the broader verification hardening, `src/main.ts` dead-symbol cleanup, and `src/anki-integration.ts` decomposition work tracked by the other child tasks.

View File

@@ -3,10 +3,10 @@ id: TASK-87.4
title: >-
Runtime composition root: remove dead symbols and tighten module boundaries in
src/main.ts
status: To Do
status: Done
assignee: []
created_date: '2026-03-06 03:19'
updated_date: '2026-03-06 03:21'
updated_date: '2026-03-06 18:10'
labels:
- tech-debt
- runtime
@@ -36,10 +36,10 @@ A noUnusedLocals/noUnusedParameters compile pass reports a large concentration o
<!-- AC:BEGIN -->
- [ ] #1 src/main.ts no longer emits dead-symbol diagnostics under a noUnusedLocals/noUnusedParameters compile pass for the areas touched by this cleanup.
- [ ] #2 Unused imports, destructured values, and stale locals identified in the current composition root are removed or relocated without behavior changes.
- [ ] #3 The resulting composition root has clearer ownership boundaries for at least one runtime slice that is currently buried in the monolith.
- [ ] #4 Relevant runtime and startup verification commands pass after the cleanup, and any command changes are documented if needed.
- [x] #1 src/main.ts no longer emits dead-symbol diagnostics under a noUnusedLocals/noUnusedParameters compile pass for the areas touched by this cleanup.
- [x] #2 Unused imports, destructured values, and stale locals identified in the current composition root are removed or relocated without behavior changes.
- [x] #3 The resulting composition root has clearer ownership boundaries for at least one runtime slice that is currently buried in the monolith.
- [x] #4 Relevant runtime and startup verification commands pass after the cleanup, and any command changes are documented if needed.
<!-- AC:END -->
## Implementation Plan
@@ -51,3 +51,13 @@ A noUnusedLocals/noUnusedParameters compile pass reports a large concentration o
3. Keep changes behavior-preserving and avoid mixing unrelated cleanup outside src/main.ts unless required to compile.
4. Verify with the updated runtime/startup test commands from TASK-87.1 plus a noUnused compile pass.
<!-- SECTION:PLAN:END -->
## Completion Notes
- Removed the dead import/destructure backlog from `src/main.ts` and deleted stale wrapper seams that no longer owned runtime behavior after the composer/runtime extractions.
- Tightened module boundaries so the composition root depends on the composed/public runtime surfaces it actually uses instead of retaining unused lower-level domain factory symbols.
- Cleared the remaining strict `noUnusedLocals`/`noUnusedParameters` failures in nearby touched files required for a clean repo-wide pass: `launcher/commands/playback-command.ts`, `src/anki-integration.ts`, `src/anki-integration/field-grouping-workflow.ts`, `src/core/services/tokenizer/yomitan-parser-runtime.test.ts`, and `src/main/runtime/composers/composer-contracts.type-test.ts`.
- Verification:
- `bunx tsc --noEmit -p tsconfig.typecheck.json --noUnusedLocals --noUnusedParameters --pretty false`
- `bun run test:fast`
- Commit: `e659b5d` (`refactor(runtime): remove dead symbols from composition roots`)

View File

@@ -3,10 +3,10 @@ id: TASK-87.5
title: >-
Dead architecture cleanup: delete unused registry and pipeline modules that
are off the live path
status: To Do
status: Done
assignee: []
created_date: '2026-03-06 03:20'
updated_date: '2026-03-06 03:21'
updated_date: '2026-03-06 11:05'
labels:
- tech-debt
- dead-code
@@ -40,10 +40,10 @@ The review found several modules that appear self-contained but unused from the
<!-- AC:BEGIN -->
- [ ] #1 Each candidate module identified in the review is either removed as dead code or justified and reconnected to a real supported execution path.
- [ ] #2 Any stale exports, imports, or tests associated with the removed or consolidated modules are cleaned up so the codebase has a single obvious path for the affected behavior.
- [ ] #3 The cleanup does not regress live tokenization or subtitle sync behavior and the relevant verification commands remain green.
- [ ] #4 Contributor-facing documentation or internal notes no longer imply that removed duplicate architecture is part of the current design.
- [x] #1 Each candidate module identified in the review is either removed as dead code or justified and reconnected to a real supported execution path.
- [x] #2 Any stale exports, imports, or tests associated with the removed or consolidated modules are cleaned up so the codebase has a single obvious path for the affected behavior.
- [x] #3 The cleanup does not regress live tokenization or subtitle sync behavior and the relevant verification commands remain green.
- [x] #4 Contributor-facing documentation or internal notes no longer imply that removed duplicate architecture is part of the current design.
<!-- AC:END -->
## Implementation Plan
@@ -55,3 +55,10 @@ The review found several modules that appear self-contained but unused from the
3. Pay special attention to subtitle sync and tokenization surfaces, since duplicate architecture exists near active code.
4. Verify the relevant tokenization and subsync commands/tests still pass and update any stale docs or notes.
<!-- SECTION:PLAN:END -->
## Implementation Notes
- Traced imports from `src/main.ts`, `src/main/runtime/**`, `src/core/services/subsync-runner.ts`, and `src/core/services/tokenizer.ts`; confirmed the candidate registry/pipeline modules were isolated from the maintained runtime path.
- Deleted dead modules: `src/translators/index.ts`, `src/subsync/engines.ts`, `src/subtitle/pipeline.ts`, `src/subtitle/stages/{merge,normalize,tokenize}.ts`, `src/subtitle/stages/normalize.test.ts`, `src/tokenizers/index.ts`, and `src/token-mergers/index.ts`.
- Moved the useful zero-width separator normalization into the live tokenizer path in `src/core/services/tokenizer.ts` and added regression coverage plus a repository-level dead-architecture guard in `src/dead-architecture-cleanup.test.ts`.
- Verified with `bun test src/core/services/tokenizer.test.ts`, `bun test src/dead-architecture-cleanup.test.ts`, `bun test src/core/services/subsync.test.ts src/subsync/utils.test.ts`, `bun run tsc`, and `bun run test:src`.

View File

@@ -3,10 +3,10 @@ id: TASK-87.6
title: >-
Anki integration maintainability: continue decomposing the oversized
orchestration layer
status: To Do
status: Done
assignee: []
created_date: '2026-03-06 03:20'
updated_date: '2026-03-06 03:21'
updated_date: '2026-03-06 09:23'
labels:
- tech-debt
- anki
@@ -40,10 +40,10 @@ src/anki-integration.ts remains an oversized orchestration file even after earli
<!-- AC:BEGIN -->
- [ ] #1 The responsibilities currently concentrated in src/anki-integration.ts are split into clearer modules or services with narrow ownership boundaries.
- [ ] #2 The resulting orchestration surface is materially smaller and easier to review, with at least one mixed-responsibility cluster extracted behind a well-named interface.
- [ ] #3 Existing Anki integration behavior remains covered by automated verification, including note update, field grouping, and proxy-related flows that the refactor touches.
- [ ] #4 Any developer-facing docs or notes needed to understand the new structure are updated in the same task.
- [x] #1 The responsibilities currently concentrated in src/anki-integration.ts are split into clearer modules or services with narrow ownership boundaries.
- [x] #2 The resulting orchestration surface is materially smaller and easier to review, with at least one mixed-responsibility cluster extracted behind a well-named interface.
- [x] #3 Existing Anki integration behavior remains covered by automated verification, including note update, field grouping, and proxy-related flows that the refactor touches.
- [x] #4 Any developer-facing docs or notes needed to understand the new structure are updated in the same task.
<!-- AC:END -->
## Implementation Plan

View File

@@ -0,0 +1,39 @@
---
id: TASK-97
title: Add configurable character-name token highlighting
status: Done
assignee: []
created_date: '2026-03-06 10:15'
updated_date: '2026-03-06 10:15'
labels:
- subtitle
- dictionary
- renderer
dependencies: []
references:
- /home/sudacode/projects/japanese/SubMiner/src/core/services/tokenizer.ts
- >-
/home/sudacode/projects/japanese/SubMiner/src/core/services/tokenizer/yomitan-parser-runtime.ts
- /home/sudacode/projects/japanese/SubMiner/src/renderer/subtitle-render.ts
priority: medium
---
## Description
<!-- SECTION:DESCRIPTION:BEGIN -->
Color subtitle tokens that match entries from the SubMiner character dictionary, with a configurable default color and a config toggle that disables both rendering and name-match detection work.
<!-- SECTION:DESCRIPTION:END -->
## Acceptance Criteria
<!-- AC:BEGIN -->
- [x] #1 Tokens matched from the SubMiner character dictionary receive dedicated renderer styling.
- [x] #2 `subtitleStyle.nameMatchEnabled` disables name-match detection work when false.
- [x] #3 `subtitleStyle.nameMatchColor` overrides the default `#f5bde6`.
- [x] #4 Regression coverage verifies config parsing, tokenizer propagation, scanner gating, and renderer class/CSS behavior.
<!-- AC:END -->
## Final Summary
<!-- SECTION:FINAL_SUMMARY:BEGIN -->
Added configurable character-name token highlighting with default color `#f5bde6` and config gate `subtitleStyle.nameMatchEnabled`. When enabled, left-to-right Yomitan scanning tags tokens whose winning dictionary entry comes from the SubMiner character dictionary; when disabled, the tokenizer skips that metadata work and the renderer suppresses name-match styling. Added focused regression tests for config parsing, main-deps wiring, Yomitan scan gating, token propagation, renderer classes, and CSS behavior.
<!-- SECTION:FINAL_SUMMARY:END -->

View File

@@ -0,0 +1,60 @@
---
id: TASK-98
title: Gate subtitle character-name highlighting on character dictionary enablement
status: Done
assignee:
- codex
created_date: '2026-03-07 00:54'
updated_date: '2026-03-07 00:56'
labels:
- subtitle
- character-dictionary
dependencies: []
references:
- /Users/sudacode/projects/japanese/SubMiner/src/main.ts
- /Users/sudacode/projects/japanese/SubMiner/src/core/services/tokenizer.ts
- >-
/Users/sudacode/projects/japanese/SubMiner/src/config/definitions/defaults-subtitle.ts
priority: medium
---
## Description
<!-- SECTION:DESCRIPTION:BEGIN -->
Ensure subtitle tokenization and other annotations continue to work, but character-name lookup/highlighting is disabled whenever the AniList character dictionary feature is disabled. This avoids unnecessary name-match processing when the backing dictionary is unavailable.
<!-- SECTION:DESCRIPTION:END -->
## Acceptance Criteria
<!-- AC:BEGIN -->
- [x] #1 When anilist.characterDictionary.enabled is false, subtitle tokenization does not request character-name match metadata or highlight character names.
- [x] #2 When anilist.characterDictionary.enabled is true and subtitleStyle.nameMatchEnabled is true, existing character-name matching behavior remains enabled.
- [x] #3 Subtitle tokenization, JLPT, frequency, and other non-name annotation behavior remain unchanged when character dictionaries are disabled.
- [x] #4 Automated tests cover the runtime gating behavior.
<!-- AC:END -->
## Implementation Plan
<!-- SECTION:PLAN:BEGIN -->
1. Add a failing test in `src/main/runtime/subtitle-tokenization-main-deps.test.ts` proving name-match enablement resolves to false when `anilist.characterDictionary.enabled` is false even if `subtitleStyle.nameMatchEnabled` is true.
2. Update `src/main/runtime/subtitle-tokenization-main-deps.ts` and `src/main.ts` so subtitle tokenization only enables name matching when both the subtitle setting and the character dictionary setting are enabled.
3. Run focused Bun tests for the updated runtime deps and subtitle processing seams.
4. If verification stays green, check off acceptance criteria and record the result.
Implementation plan saved in `docs/plans/2026-03-06-character-name-gating.md`.
<!-- SECTION:PLAN:END -->
## Implementation Notes
<!-- SECTION:NOTES:BEGIN -->
Created plan doc `docs/plans/2026-03-06-character-name-gating.md` after user approved the narrow runtime-gating approach. Proceeding with TDD from the subtitle tokenization main-deps seam.
Implemented the gate at the subtitle tokenization runtime-deps boundary so `getNameMatchEnabled` is false unless both `subtitleStyle.nameMatchEnabled` and `anilist.characterDictionary.enabled` are true.
Verification: `bun test src/main/runtime/subtitle-tokenization-main-deps.test.ts`, `bun test src/core/services/subtitle-processing-controller.test.ts`, `bun run typecheck`.
<!-- SECTION:NOTES:END -->
## Final Summary
<!-- SECTION:FINAL_SUMMARY:BEGIN -->
Character-name lookup/highlighting is now suppressed when the AniList character dictionary is disabled, while subtitle tokenization and other annotation paths remain active. Added focused runtime-deps coverage and wired the main runtime to pass the character-dictionary enabled flag into subtitle tokenization.
<!-- SECTION:FINAL_SUMMARY:END -->

View File

@@ -139,7 +139,7 @@
"mode": "single", // single: use one color for all matching tokens. banded: use color ramp by frequency band. Values: single | banded
"matchMode": "headword", // Frequency lookup text selection mode. Values: headword | surface
"singleColor": "#f5a97f", // Color used when frequencyDictionary.mode is `single`.
"bandedColors": ["#ed8796", "#f5a97f", "#f9e2af", "#a6e3a1", "#8aadf4"], // Five colors used for rank bands when mode is `banded` (from most common to least within topX).
"bandedColors": ["#ed8796", "#f5a97f", "#f9e2af", "#8bd5ca", "#8aadf4"], // Five colors used for rank bands when mode is `banded` (from most common to least within topX).
}, // Frequency dictionary setting.
"secondary": {
"fontFamily": "Inter, Noto Sans, Helvetica Neue, sans-serif", // Font family setting.

28
docs/anki-integration.md Normal file
View File

@@ -0,0 +1,28 @@
# Anki Integration
read_when:
- changing `src/anki-integration.ts`
- changing Anki transport/config hot-reload behavior
- tracing note update, field grouping, or proxy ownership
## Ownership
- `src/anki-integration.ts`: thin facade; wires dependencies; exposes public Anki API used by runtime/services.
- `src/anki-integration/runtime.ts`: normalized config state, polling-vs-proxy transport lifecycle, runtime config patch handling.
- `src/anki-integration/card-creation.ts`: sentence/audio card creation and clipboard update flow.
- `src/anki-integration/note-update-workflow.ts`: enrich newly added notes.
- `src/anki-integration/field-grouping.ts`: preview/build helpers for Kiku field grouping.
- `src/anki-integration/field-grouping-workflow.ts`: auto/manual merge execution.
- `src/anki-integration/anki-connect-proxy.ts`: local proxy transport for post-add enrichment.
- `src/anki-integration/known-word-cache.ts`: known-word cache lifecycle and persistence.
## Refactor seam
`AnkiIntegrationRuntime` owns the cluster that previously mixed:
- config normalization/defaulting
- polling vs proxy startup/shutdown
- transport restart decisions during runtime patches
- known-word cache lifecycle toggles tied to config changes
Keep new orchestration work in `runtime.ts` when it changes process-level Anki state. Keep note/card behavior in the workflow/service modules.

View File

@@ -0,0 +1,50 @@
# Character Name Gating Implementation Plan
> **For Claude:** REQUIRED SUB-SKILL: Use superpowers:executing-plans to implement this plan task-by-task.
**Goal:** Disable subtitle character-name lookup/highlighting when the AniList character dictionary feature is disabled, while keeping tokenization and all other annotations working.
**Architecture:** Gate `getNameMatchEnabled` at the runtime-deps boundary used by subtitle tokenization. Keep the tokenizer pipeline intact and only suppress character-name metadata requests when `anilist.characterDictionary.enabled` is false, regardless of `subtitleStyle.nameMatchEnabled`.
**Tech Stack:** TypeScript, Bun test runner, Electron main/runtime wiring.
---
### Task 1: Add runtime gating coverage
**Files:**
- Modify: `src/main/runtime/subtitle-tokenization-main-deps.test.ts`
**Step 1: Write the failing test**
Add a test proving `getNameMatchEnabled()` resolves to `false` when `getCharacterDictionaryEnabled()` is `false` even if `subtitleStyle.nameMatchEnabled` is `true`.
**Step 2: Run test to verify it fails**
Run: `bun test src/main/runtime/subtitle-tokenization-main-deps.test.ts`
Expected: FAIL because the deps builder does not yet combine the two flags.
### Task 2: Implement minimal runtime gate
**Files:**
- Modify: `src/main/runtime/subtitle-tokenization-main-deps.ts`
- Modify: `src/main.ts`
**Step 3: Write minimal implementation**
Add `getCharacterDictionaryEnabled` to the main handler deps and make the built `getNameMatchEnabled` return true only when both the subtitle setting and the character dictionary setting are enabled.
**Step 4: Run tests to verify green**
Run: `bun test src/main/runtime/subtitle-tokenization-main-deps.test.ts`
Expected: PASS.
### Task 3: Verify no regressions in related tokenization seams
**Files:**
- Modify: none unless failures reveal drift
**Step 5: Run focused verification**
Run: `bun test src/core/services/subtitle-processing-controller.test.ts src/main/runtime/subtitle-tokenization-main-deps.test.ts`
Expected: PASS.

View File

@@ -1,5 +1,4 @@
import fs from 'node:fs';
import path from 'node:path';
import { fail, log } from '../log.js';
import { commandExists, isYoutubeTarget, realpathMaybe, resolvePathMaybe } from '../util.js';
import { collectVideos, showFzfMenu, showRofiMenu } from '../picker.js';

View File

@@ -25,6 +25,9 @@ type SmokeCase = {
mpvOverlayLogPath: string;
};
const LAUNCHER_RUN_TIMEOUT_MS = 25000;
const LONG_SMOKE_TEST_TIMEOUT_MS = 30000;
function writeExecutable(filePath: string, body: string): void {
fs.writeFileSync(filePath, body);
fs.chmodSync(filePath, 0o755);
@@ -162,7 +165,7 @@ function runLauncher(
{
env,
encoding: 'utf8',
timeout: 15000,
timeout: LAUNCHER_RUN_TIMEOUT_MS,
},
);
@@ -263,7 +266,7 @@ test('launcher mpv status returns ready when socket is connectable', async () =>
test(
'launcher start-overlay run forwards socket/backend and stops overlay after mpv exits',
{ timeout: 20000 },
{ timeout: LONG_SMOKE_TEST_TIMEOUT_MS },
async () => {
await withSmokeCase('overlay-start-stop', async (smokeCase) => {
const env = makeTestEnv(smokeCase);
@@ -322,7 +325,7 @@ test(
test(
'launcher starts mpv paused when plugin auto-start visible overlay gate is enabled',
{ timeout: 20000 },
{ timeout: LONG_SMOKE_TEST_TIMEOUT_MS },
async () => {
await withSmokeCase('autoplay-ready-gate', async (smokeCase) => {
fs.writeFileSync(

View File

@@ -48,7 +48,7 @@ function parseCliArgs(argv: string[]): CliOptions {
let colorBand1 = '#ed8796';
let colorBand2 = '#f5a97f';
let colorBand3 = '#f9e2af';
let colorBand4 = '#a6e3a1';
let colorBand4 = '#8bd5ca';
let colorBand5 = '#8aadf4';
let colorKnown = '#a6da95';
let colorNPlusOne = '#c6a0f6';

View File

@@ -222,9 +222,11 @@ test('AnkiIntegration does not allocate proxy server when proxy transport is dis
);
const privateState = integration as unknown as {
runtime: {
proxyServer: unknown | null;
};
assert.equal(privateState.proxyServer, null);
};
assert.equal(privateState.runtime.proxyServer, null);
});
test('FieldGroupingMergeCollaborator synchronizes ExpressionAudio from merged SentenceAudio', async () => {

View File

@@ -48,6 +48,7 @@ import { FieldGroupingService } from './anki-integration/field-grouping';
import { FieldGroupingMergeCollaborator } from './anki-integration/field-grouping-merge';
import { NoteUpdateWorkflow } from './anki-integration/note-update-workflow';
import { FieldGroupingWorkflow } from './anki-integration/field-grouping-workflow';
import { AnkiIntegrationRuntime, normalizeAnkiIntegrationConfig } from './anki-integration/runtime';
const log = createLogger('anki').child('integration');
@@ -113,8 +114,6 @@ export class AnkiIntegration {
private timingTracker: SubtitleTimingTracker;
private config: AnkiConnectConfig;
private pollingRunner!: PollingRunner;
private proxyServer: AnkiConnectProxyServer | null = null;
private started = false;
private previousNoteIds = new Set<number>();
private mpvClient: MpvClient;
private osdCallback: ((text: string) => void) | null = null;
@@ -135,6 +134,7 @@ export class AnkiIntegration {
private fieldGroupingService: FieldGroupingService;
private noteUpdateWorkflow: NoteUpdateWorkflow;
private fieldGroupingWorkflow: FieldGroupingWorkflow;
private runtime: AnkiIntegrationRuntime;
constructor(
config: AnkiConnectConfig,
@@ -148,7 +148,7 @@ export class AnkiIntegration {
}) => Promise<KikuFieldGroupingChoice>,
knownWordCacheStatePath?: string,
) {
this.config = this.normalizeConfig(config);
this.config = normalizeAnkiIntegrationConfig(config);
this.client = new AnkiConnectClient(this.config.url!);
this.mediaGenerator = new MediaGenerator();
this.timingTracker = timingTracker;
@@ -163,6 +163,7 @@ export class AnkiIntegration {
this.fieldGroupingService = this.createFieldGroupingService();
this.noteUpdateWorkflow = this.createNoteUpdateWorkflow();
this.fieldGroupingWorkflow = this.createFieldGroupingWorkflow();
this.runtime = this.createRuntime(config);
}
private createFieldGroupingMergeCollaborator(): FieldGroupingMergeCollaborator {
@@ -182,75 +183,6 @@ export class AnkiIntegration {
});
}
private normalizeConfig(config: AnkiConnectConfig): AnkiConnectConfig {
const resolvedUrl =
typeof config.url === 'string' && config.url.trim().length > 0
? config.url.trim()
: DEFAULT_ANKI_CONNECT_CONFIG.url;
const proxySource =
config.proxy && typeof config.proxy === 'object'
? (config.proxy as NonNullable<AnkiConnectConfig['proxy']>)
: {};
const normalizedProxyPort =
typeof proxySource.port === 'number' &&
Number.isInteger(proxySource.port) &&
proxySource.port >= 1 &&
proxySource.port <= 65535
? proxySource.port
: DEFAULT_ANKI_CONNECT_CONFIG.proxy?.port;
const normalizedProxyHost =
typeof proxySource.host === 'string' && proxySource.host.trim().length > 0
? proxySource.host.trim()
: DEFAULT_ANKI_CONNECT_CONFIG.proxy?.host;
const normalizedProxyUpstreamUrl =
typeof proxySource.upstreamUrl === 'string' && proxySource.upstreamUrl.trim().length > 0
? proxySource.upstreamUrl.trim()
: resolvedUrl;
return {
...DEFAULT_ANKI_CONNECT_CONFIG,
...config,
url: resolvedUrl,
fields: {
...DEFAULT_ANKI_CONNECT_CONFIG.fields,
...(config.fields ?? {}),
},
proxy: {
...DEFAULT_ANKI_CONNECT_CONFIG.proxy,
...(config.proxy ?? {}),
enabled: proxySource.enabled === true,
host: normalizedProxyHost,
port: normalizedProxyPort,
upstreamUrl: normalizedProxyUpstreamUrl,
},
ai: {
...DEFAULT_ANKI_CONNECT_CONFIG.ai,
...(config.openRouter ?? {}),
...(config.ai ?? {}),
},
media: {
...DEFAULT_ANKI_CONNECT_CONFIG.media,
...(config.media ?? {}),
},
behavior: {
...DEFAULT_ANKI_CONNECT_CONFIG.behavior,
...(config.behavior ?? {}),
},
metadata: {
...DEFAULT_ANKI_CONNECT_CONFIG.metadata,
...(config.metadata ?? {}),
},
isLapis: {
...DEFAULT_ANKI_CONNECT_CONFIG.isLapis,
...(config.isLapis ?? {}),
},
isKiku: {
...DEFAULT_ANKI_CONNECT_CONFIG.isKiku,
...(config.isKiku ?? {}),
},
} as AnkiConnectConfig;
}
private createKnownWordCache(knownWordCacheStatePath?: string): KnownWordCacheManager {
return new KnownWordCacheManager({
client: {
@@ -302,11 +234,20 @@ export class AnkiIntegration {
});
}
private getOrCreateProxyServer(): AnkiConnectProxyServer {
if (!this.proxyServer) {
this.proxyServer = this.createProxyServer();
}
return this.proxyServer;
private createRuntime(initialConfig: AnkiConnectConfig): AnkiIntegrationRuntime {
return new AnkiIntegrationRuntime({
initialConfig,
pollingRunner: this.pollingRunner,
knownWordCache: this.knownWordCache,
proxyServerFactory: () => this.createProxyServer(),
logInfo: (message, ...args) => log.info(message, ...args),
logWarn: (message, ...args) => log.warn(message, ...args),
logError: (message, ...args) => log.error(message, ...args),
onConfigChanged: (nextConfig) => {
this.config = nextConfig;
this.client = new AnkiConnectClient(nextConfig.url!);
},
});
}
private createCardCreationService(): CardCreationService {
@@ -517,14 +458,6 @@ export class AnkiIntegration {
return this.config.nPlusOne?.highlightEnabled === true;
}
private startKnownWordCacheLifecycle(): void {
this.knownWordCache.startLifecycle();
}
private stopKnownWordCacheLifecycle(): void {
this.knownWordCache.stopLifecycle();
}
private getConfiguredAnkiTags(): string[] {
if (!Array.isArray(this.config.tags)) {
return [];
@@ -606,64 +539,12 @@ export class AnkiIntegration {
};
}
private isProxyTransportEnabled(config: AnkiConnectConfig = this.config): boolean {
return config.proxy?.enabled === true;
}
private getTransportConfigKey(config: AnkiConnectConfig = this.config): string {
if (this.isProxyTransportEnabled(config)) {
return [
'proxy',
config.proxy?.host ?? '',
String(config.proxy?.port ?? ''),
config.proxy?.upstreamUrl ?? '',
].join(':');
}
return ['polling', String(config.pollingRate ?? DEFAULT_ANKI_CONNECT_CONFIG.pollingRate)].join(
':',
);
}
private startTransport(): void {
if (this.isProxyTransportEnabled()) {
const proxyHost = this.config.proxy?.host ?? '127.0.0.1';
const proxyPort = this.config.proxy?.port ?? 8766;
const upstreamUrl = this.config.proxy?.upstreamUrl ?? this.config.url ?? '';
this.getOrCreateProxyServer().start({
host: proxyHost,
port: proxyPort,
upstreamUrl,
});
log.info(
`Starting AnkiConnect integration with local proxy: http://${proxyHost}:${proxyPort} -> ${upstreamUrl}`,
);
return;
}
log.info('Starting AnkiConnect integration with polling rate:', this.config.pollingRate);
this.pollingRunner.start();
}
private stopTransport(): void {
this.pollingRunner.stop();
this.proxyServer?.stop();
}
start(): void {
if (this.started) {
this.stop();
}
this.startKnownWordCacheLifecycle();
this.startTransport();
this.started = true;
this.runtime.start();
}
stop(): void {
this.stopTransport();
this.stopKnownWordCacheLifecycle();
this.started = false;
log.info('Stopped AnkiConnect integration');
this.runtime.stop();
}
private async processNewCard(
@@ -1216,58 +1097,7 @@ export class AnkiIntegration {
}
applyRuntimeConfigPatch(patch: Partial<AnkiConnectConfig>): void {
const wasEnabled = this.config.nPlusOne?.highlightEnabled === true;
const previousTransportKey = this.getTransportConfigKey(this.config);
const mergedConfig: AnkiConnectConfig = {
...this.config,
...patch,
nPlusOne:
patch.nPlusOne !== undefined
? {
...(this.config.nPlusOne ?? DEFAULT_ANKI_CONNECT_CONFIG.nPlusOne),
...patch.nPlusOne,
}
: this.config.nPlusOne,
fields:
patch.fields !== undefined
? { ...this.config.fields, ...patch.fields }
: this.config.fields,
media:
patch.media !== undefined ? { ...this.config.media, ...patch.media } : this.config.media,
behavior:
patch.behavior !== undefined
? { ...this.config.behavior, ...patch.behavior }
: this.config.behavior,
proxy:
patch.proxy !== undefined ? { ...this.config.proxy, ...patch.proxy } : this.config.proxy,
metadata:
patch.metadata !== undefined
? { ...this.config.metadata, ...patch.metadata }
: this.config.metadata,
isLapis:
patch.isLapis !== undefined
? { ...this.config.isLapis, ...patch.isLapis }
: this.config.isLapis,
isKiku:
patch.isKiku !== undefined
? { ...this.config.isKiku, ...patch.isKiku }
: this.config.isKiku,
};
this.config = this.normalizeConfig(mergedConfig);
if (wasEnabled && this.config.nPlusOne?.highlightEnabled === false) {
this.stopKnownWordCacheLifecycle();
this.knownWordCache.clearKnownWordCacheState();
} else {
this.startKnownWordCacheLifecycle();
}
const nextTransportKey = this.getTransportConfigKey(this.config);
if (this.started && previousTransportKey !== nextTransportKey) {
this.stopTransport();
this.startTransport();
}
this.runtime.applyRuntimeConfigPatch(patch);
}
destroy(): void {

View File

@@ -80,7 +80,7 @@ export class FieldGroupingWorkflow {
async handleManual(
originalNoteId: number,
newNoteId: number,
_newNoteId: number,
newNoteInfo: FieldGroupingWorkflowNoteInfo,
): Promise<boolean> {
const callback = await this.resolveFieldGroupingCallback();

View File

@@ -0,0 +1,108 @@
import test from 'node:test';
import assert from 'node:assert/strict';
import { DEFAULT_ANKI_CONNECT_CONFIG } from '../config';
import type { AnkiConnectConfig } from '../types';
import { AnkiIntegrationRuntime } from './runtime';
// Constructs an AnkiIntegrationRuntime backed by spy dependencies.
// Every collaborator call is appended to `calls`, so tests can assert both
// which dependencies ran and the exact order they ran in.
function createRuntime(
  config: Partial<AnkiConnectConfig> = {},
  overrides: Partial<ConstructorParameters<typeof AnkiIntegrationRuntime>[0]> = {},
) {
  const calls: string[] = [];
  const runtime = new AnkiIntegrationRuntime({
    // Tests pass partial configs; normalization fills in the defaults.
    initialConfig: config as AnkiConnectConfig,
    pollingRunner: {
      start: () => calls.push('polling:start'),
      stop: () => calls.push('polling:stop'),
    },
    knownWordCache: {
      startLifecycle: () => calls.push('known:start'),
      stopLifecycle: () => calls.push('known:stop'),
      clearKnownWordCacheState: () => calls.push('known:clear'),
    },
    proxyServerFactory: () => ({
      start: ({ host, port, upstreamUrl }) =>
        calls.push(`proxy:start:${host}:${port}:${upstreamUrl}`),
      stop: () => calls.push('proxy:stop'),
    }),
    logInfo: () => undefined,
    logWarn: () => undefined,
    logError: () => undefined,
    onConfigChanged: () => undefined,
    // Allow individual tests to replace any spy with a custom double.
    ...overrides,
  });
  return { runtime, calls };
}
// Normalization should trim the URL, sanitize proxy fields, and fall back to
// defaults (a blank upstreamUrl resolves to the normalized url).
test('AnkiIntegrationRuntime normalizes url and proxy defaults', () => {
  const { runtime } = createRuntime({
    url: ' http://anki.local:8765 ',
    proxy: {
      enabled: true,
      host: ' 0.0.0.0 ',
      port: 7001,
      upstreamUrl: ' ',
    },
  });
  const normalized = runtime.getConfig();
  assert.equal(normalized.url, 'http://anki.local:8765');
  assert.equal(normalized.proxy?.enabled, true);
  assert.equal(normalized.proxy?.host, '0.0.0.0');
  assert.equal(normalized.proxy?.port, 7001);
  assert.equal(normalized.proxy?.upstreamUrl, 'http://anki.local:8765');
  assert.equal(normalized.media?.fallbackDuration, DEFAULT_ANKI_CONNECT_CONFIG.media.fallbackDuration);
});

// Proxy mode must start the proxy server (after the known-word cache) and
// must never start the polling runner.
test('AnkiIntegrationRuntime starts proxy transport when proxy mode is enabled', () => {
  const { runtime, calls } = createRuntime({
    proxy: {
      enabled: true,
      host: '127.0.0.1',
      port: 9999,
      upstreamUrl: 'http://upstream:8765',
    },
  });
  runtime.start();
  assert.deepEqual(calls, [
    'known:start',
    'proxy:start:127.0.0.1:9999:http://upstream:8765',
  ]);
});

// A patch that both disables N+1 highlighting and enables the proxy must
// stop+clear the known-word cache first, then swap polling for the proxy.
test('AnkiIntegrationRuntime switches transports and clears known words when runtime patch disables highlighting', () => {
  const { runtime, calls } = createRuntime({
    nPlusOne: {
      highlightEnabled: true,
    },
    pollingRate: 250,
  });
  runtime.start();
  // Discard start-up calls; only the patch's effects are asserted below.
  calls.length = 0;
  runtime.applyRuntimeConfigPatch({
    nPlusOne: {
      highlightEnabled: false,
    },
    proxy: {
      enabled: true,
      host: '127.0.0.1',
      port: 8766,
      upstreamUrl: 'http://127.0.0.1:8765',
    },
  });
  assert.deepEqual(calls, [
    'known:stop',
    'known:clear',
    'polling:stop',
    'proxy:start:127.0.0.1:8766:http://127.0.0.1:8765',
  ]);
});

View File

@@ -0,0 +1,233 @@
import { DEFAULT_ANKI_CONNECT_CONFIG } from '../config';
import type { AnkiConnectConfig } from '../types';
/** Minimal surface of the local proxy server the runtime can start/stop. */
export interface AnkiIntegrationRuntimeProxyServer {
  start(options: { host: string; port: number; upstreamUrl: string }): void;
  stop(): void;
}

/** Injected collaborators for AnkiIntegrationRuntime; kept narrow so tests can supply spies. */
interface AnkiIntegrationRuntimeDeps {
  initialConfig: AnkiConnectConfig;
  // Polling transport, used whenever the proxy is disabled.
  pollingRunner: {
    start(): void;
    stop(): void;
  };
  // Known-word cache lifecycle hooks tied to N+1 highlighting.
  knownWordCache: {
    startLifecycle(): void;
    stopLifecycle(): void;
    clearKnownWordCacheState(): void;
  };
  // Invoked lazily the first time the proxy transport starts.
  proxyServerFactory: () => AnkiIntegrationRuntimeProxyServer;
  logInfo: (message: string, ...args: unknown[]) => void;
  logWarn: (message: string, ...args: unknown[]) => void;
  logError: (message: string, ...args: unknown[]) => void;
  // Called after each runtime config patch with the re-normalized config.
  onConfigChanged?: (config: AnkiConnectConfig) => void;
}
/**
 * Returns `value` trimmed when it is a string with non-whitespace content;
 * otherwise returns `null` (non-strings, empty and whitespace-only strings).
 */
function trimToNonEmptyString(value: unknown): string | null {
  if (typeof value !== 'string') {
    return null;
  }
  const normalized = value.trim();
  if (normalized.length === 0) {
    return null;
  }
  return normalized;
}
/**
 * Produces a fully-populated AnkiConnect config by layering `config` over
 * DEFAULT_ANKI_CONNECT_CONFIG section by section, sanitizing URL/proxy fields:
 * - `url` falls back to the default when blank or not a string;
 * - `proxy.port` must be an integer in [1, 65535], else the default port;
 * - `proxy.host` falls back to the default when blank;
 * - `proxy.upstreamUrl` falls back to the resolved `url` when blank;
 * - `proxy.enabled` is coerced to a strict boolean.
 */
export function normalizeAnkiIntegrationConfig(config: AnkiConnectConfig): AnkiConnectConfig {
  const resolvedUrl =
    trimToNonEmptyString(config.url) ?? DEFAULT_ANKI_CONNECT_CONFIG.url;
  // Treat a missing or non-object proxy value as "unset" rather than failing.
  const proxySource =
    config.proxy && typeof config.proxy === 'object'
      ? (config.proxy as NonNullable<AnkiConnectConfig['proxy']>)
      : {};
  const normalizedProxyPort =
    typeof proxySource.port === 'number' &&
    Number.isInteger(proxySource.port) &&
    proxySource.port >= 1 &&
    proxySource.port <= 65535
      ? proxySource.port
      : DEFAULT_ANKI_CONNECT_CONFIG.proxy?.port;
  const normalizedProxyHost =
    trimToNonEmptyString(proxySource.host) ?? DEFAULT_ANKI_CONNECT_CONFIG.proxy?.host;
  const normalizedProxyUpstreamUrl = trimToNonEmptyString(proxySource.upstreamUrl) ?? resolvedUrl;
  return {
    ...DEFAULT_ANKI_CONNECT_CONFIG,
    ...config,
    url: resolvedUrl,
    fields: {
      ...DEFAULT_ANKI_CONNECT_CONFIG.fields,
      ...(config.fields ?? {}),
    },
    proxy: {
      ...DEFAULT_ANKI_CONNECT_CONFIG.proxy,
      ...(config.proxy ?? {}),
      // Sanitized values win over whatever the raw proxy object carried.
      enabled: proxySource.enabled === true,
      host: normalizedProxyHost,
      port: normalizedProxyPort,
      upstreamUrl: normalizedProxyUpstreamUrl,
    },
    ai: {
      ...DEFAULT_ANKI_CONNECT_CONFIG.ai,
      // NOTE(review): `openRouter` is spread before `ai`, so `ai` keys win —
      // presumably `openRouter` is a legacy alias; confirm against callers.
      ...(config.openRouter ?? {}),
      ...(config.ai ?? {}),
    },
    media: {
      ...DEFAULT_ANKI_CONNECT_CONFIG.media,
      ...(config.media ?? {}),
    },
    behavior: {
      ...DEFAULT_ANKI_CONNECT_CONFIG.behavior,
      ...(config.behavior ?? {}),
    },
    metadata: {
      ...DEFAULT_ANKI_CONNECT_CONFIG.metadata,
      ...(config.metadata ?? {}),
    },
    isLapis: {
      ...DEFAULT_ANKI_CONNECT_CONFIG.isLapis,
      ...(config.isLapis ?? {}),
    },
    isKiku: {
      ...DEFAULT_ANKI_CONNECT_CONFIG.isKiku,
      ...(config.isKiku ?? {}),
    },
  } as AnkiConnectConfig;
}
/**
 * Transport/lifecycle core of the AnkiConnect integration.
 *
 * Owns the normalized config, the known-word cache lifecycle, and the active
 * transport (HTTP polling or a local proxy server). Runtime config patches
 * are deep-merged, re-normalized, and may trigger a transport restart when
 * the transport-relevant settings change.
 */
export class AnkiIntegrationRuntime {
  private config: AnkiConnectConfig;
  // Lazily created on first proxy start; reused across transport restarts.
  private proxyServer: AnkiIntegrationRuntimeProxyServer | null = null;
  private started = false;

  constructor(private readonly deps: AnkiIntegrationRuntimeDeps) {
    this.config = normalizeAnkiIntegrationConfig(deps.initialConfig);
  }

  /** Returns the current normalized config (not a defensive copy). */
  getConfig(): AnkiConnectConfig {
    return this.config;
  }

  /** Starts the integration; restarts cleanly if it was already running. */
  start(): void {
    if (this.started) {
      this.stop();
    }
    this.deps.knownWordCache.startLifecycle();
    this.startTransport();
    this.started = true;
  }

  /** Stops the active transport and the known-word cache lifecycle. */
  stop(): void {
    this.stopTransport();
    this.deps.knownWordCache.stopLifecycle();
    this.started = false;
    this.deps.logInfo('Stopped AnkiConnect integration');
  }

  /**
   * Merges a partial config patch into the current config (deep-merging the
   * known nested sections), re-normalizes, notifies `onConfigChanged`, then:
   * - stops and clears the known-word cache when N+1 highlighting flips
   *   from enabled to disabled;
   * - restarts the transport when the effective transport key changed and
   *   the runtime is currently started.
   */
  applyRuntimeConfigPatch(patch: Partial<AnkiConnectConfig>): void {
    const wasKnownWordCacheEnabled = this.config.nPlusOne?.highlightEnabled === true;
    const previousTransportKey = this.getTransportConfigKey(this.config);
    // Shallow-merge the top level, but deep-merge each nested section so a
    // patch touching one key does not wipe out its siblings.
    const mergedConfig: AnkiConnectConfig = {
      ...this.config,
      ...patch,
      nPlusOne:
        patch.nPlusOne !== undefined
          ? {
              ...(this.config.nPlusOne ?? DEFAULT_ANKI_CONNECT_CONFIG.nPlusOne),
              ...patch.nPlusOne,
            }
          : this.config.nPlusOne,
      fields:
        patch.fields !== undefined
          ? { ...this.config.fields, ...patch.fields }
          : this.config.fields,
      media:
        patch.media !== undefined ? { ...this.config.media, ...patch.media } : this.config.media,
      behavior:
        patch.behavior !== undefined
          ? { ...this.config.behavior, ...patch.behavior }
          : this.config.behavior,
      proxy:
        patch.proxy !== undefined ? { ...this.config.proxy, ...patch.proxy } : this.config.proxy,
      metadata:
        patch.metadata !== undefined
          ? { ...this.config.metadata, ...patch.metadata }
          : this.config.metadata,
      isLapis:
        patch.isLapis !== undefined
          ? { ...this.config.isLapis, ...patch.isLapis }
          : this.config.isLapis,
      isKiku:
        patch.isKiku !== undefined
          ? { ...this.config.isKiku, ...patch.isKiku }
          : this.config.isKiku,
    };
    this.config = normalizeAnkiIntegrationConfig(mergedConfig);
    this.deps.onConfigChanged?.(this.config);
    if (wasKnownWordCacheEnabled && this.config.nPlusOne?.highlightEnabled === false) {
      this.deps.knownWordCache.stopLifecycle();
      this.deps.knownWordCache.clearKnownWordCacheState();
    } else {
      // NOTE(review): called on every patch — assumes startLifecycle is
      // idempotent when already running; confirm in the cache implementation.
      this.deps.knownWordCache.startLifecycle();
    }
    const nextTransportKey = this.getTransportConfigKey(this.config);
    if (this.started && previousTransportKey !== nextTransportKey) {
      this.stopTransport();
      this.startTransport();
    }
  }

  /** Returns the proxy server, creating it via the factory on first use. */
  getOrCreateProxyServer(): AnkiIntegrationRuntimeProxyServer {
    if (!this.proxyServer) {
      this.proxyServer = this.deps.proxyServerFactory();
    }
    return this.proxyServer;
  }

  private isProxyTransportEnabled(config: AnkiConnectConfig = this.config): boolean {
    return config.proxy?.enabled === true;
  }

  /**
   * Builds a comparable key over the transport-relevant settings so that
   * applyRuntimeConfigPatch can detect when a transport restart is required.
   */
  private getTransportConfigKey(config: AnkiConnectConfig = this.config): string {
    if (this.isProxyTransportEnabled(config)) {
      return [
        'proxy',
        config.proxy?.host ?? '',
        String(config.proxy?.port ?? ''),
        config.proxy?.upstreamUrl ?? '',
      ].join(':');
    }
    return ['polling', String(config.pollingRate ?? DEFAULT_ANKI_CONNECT_CONFIG.pollingRate)].join(
      ':',
    );
  }

  // Starts exactly one transport: the local proxy when enabled, else polling.
  private startTransport(): void {
    if (this.isProxyTransportEnabled()) {
      const proxyHost = this.config.proxy?.host ?? '127.0.0.1';
      const proxyPort = this.config.proxy?.port ?? 8766;
      const upstreamUrl = this.config.proxy?.upstreamUrl ?? this.config.url ?? '';
      this.getOrCreateProxyServer().start({
        host: proxyHost,
        port: proxyPort,
        upstreamUrl,
      });
      this.deps.logInfo(
        `Starting AnkiConnect integration with local proxy: http://${proxyHost}:${proxyPort} -> ${upstreamUrl}`,
      );
      return;
    }
    this.deps.logInfo(
      'Starting AnkiConnect integration with polling rate:',
      this.config.pollingRate,
    );
    this.deps.pollingRunner.start();
  }

  // Stops both transports; each stop is expected to be a no-op if inactive.
  private stopTransport(): void {
    this.deps.pollingRunner.stop();
    this.proxyServer?.stop();
  }
}

View File

@@ -242,6 +242,49 @@ test('parses subtitleStyle.hoverTokenColor and warns on invalid values', () => {
);
});
// A valid hex nameMatchColor is accepted verbatim; a non-hex value falls back
// to the default (#f5bde6) and records a warning at subtitleStyle.nameMatchColor.
test('parses subtitleStyle.nameMatchColor and warns on invalid values', () => {
  const validDir = makeTempDir();
  fs.writeFileSync(
    path.join(validDir, 'config.jsonc'),
    `{
  "subtitleStyle": {
    "nameMatchColor": "#eed49f"
  }
}`,
    'utf-8',
  );
  const validService = new ConfigService(validDir);
  assert.equal(
    ((validService.getConfig().subtitleStyle as unknown as Record<string, unknown>).nameMatchColor ??
      null) as string | null,
    '#eed49f',
  );
  const invalidDir = makeTempDir();
  fs.writeFileSync(
    path.join(invalidDir, 'config.jsonc'),
    `{
  "subtitleStyle": {
    "nameMatchColor": "pink"
  }
}`,
    'utf-8',
  );
  const invalidService = new ConfigService(invalidDir);
  assert.equal(
    ((invalidService.getConfig().subtitleStyle as unknown as Record<string, unknown>)
      .nameMatchColor ?? null) as string | null,
    '#f5bde6',
  );
  assert.ok(
    invalidService
      .getWarnings()
      .some((warning) => warning.path === 'subtitleStyle.nameMatchColor'),
  );
});
test('parses subtitleStyle.hoverTokenBackgroundColor and warns on invalid values', () => {
const validDir = makeTempDir();
fs.writeFileSync(
@@ -280,6 +323,44 @@ test('parses subtitleStyle.hoverTokenBackgroundColor and warns on invalid values
);
});
// A boolean nameMatchEnabled is accepted as-is; a non-boolean falls back to
// the default and records a warning at subtitleStyle.nameMatchEnabled.
test('parses subtitleStyle.nameMatchEnabled and warns on invalid values', () => {
  const validDir = makeTempDir();
  fs.writeFileSync(
    path.join(validDir, 'config.jsonc'),
    `{
  "subtitleStyle": {
    "nameMatchEnabled": false
  }
}`,
    'utf-8',
  );
  const validService = new ConfigService(validDir);
  assert.equal(validService.getConfig().subtitleStyle.nameMatchEnabled, false);
  const invalidDir = makeTempDir();
  fs.writeFileSync(
    path.join(invalidDir, 'config.jsonc'),
    `{
  "subtitleStyle": {
    "nameMatchEnabled": "no"
  }
}`,
    'utf-8',
  );
  const invalidService = new ConfigService(invalidDir);
  assert.equal(
    invalidService.getConfig().subtitleStyle.nameMatchEnabled,
    DEFAULT_CONFIG.subtitleStyle.nameMatchEnabled,
  );
  assert.ok(
    invalidService
      .getWarnings()
      .some((warning) => warning.path === 'subtitleStyle.nameMatchEnabled'),
  );
});
test('parses anilist.enabled and warns for invalid value', () => {
const dir = makeTempDir();
fs.writeFileSync(

View File

@@ -8,6 +8,8 @@ export const SUBTITLE_DEFAULT_CONFIG: Pick<ResolvedConfig, 'subtitleStyle'> = {
autoPauseVideoOnYomitanPopup: false,
hoverTokenColor: '#f4dbd6',
hoverTokenBackgroundColor: 'rgba(54, 58, 79, 0.84)',
nameMatchEnabled: true,
nameMatchColor: '#f5bde6',
fontFamily: 'M PLUS 1 Medium, Source Han Sans JP, Noto Sans CJK JP',
fontSize: 35,
fontColor: '#cad3f5',
@@ -37,7 +39,7 @@ export const SUBTITLE_DEFAULT_CONFIG: Pick<ResolvedConfig, 'subtitleStyle'> = {
mode: 'single',
matchMode: 'headword',
singleColor: '#f5a97f',
bandedColors: ['#ed8796', '#f5a97f', '#f9e2af', '#a6e3a1', '#8aadf4'],
bandedColors: ['#ed8796', '#f5a97f', '#f9e2af', '#8bd5ca', '#8aadf4'],
},
secondary: {
fontFamily: 'Inter, Noto Sans, Helvetica Neue, sans-serif',

View File

@@ -47,6 +47,20 @@ export function buildSubtitleConfigOptionRegistry(
defaultValue: defaultConfig.subtitleStyle.hoverTokenBackgroundColor,
description: 'CSS color used for hovered subtitle token background highlight in mpv.',
},
{
path: 'subtitleStyle.nameMatchEnabled',
kind: 'boolean',
defaultValue: defaultConfig.subtitleStyle.nameMatchEnabled,
description:
'Enable subtitle token coloring for matches from the SubMiner character dictionary.',
},
{
path: 'subtitleStyle.nameMatchColor',
kind: 'string',
defaultValue: defaultConfig.subtitleStyle.nameMatchColor,
description:
'Hex color used when a subtitle token matches an entry from the SubMiner character dictionary.',
},
{
path: 'subtitleStyle.frequencyDictionary.enabled',
kind: 'boolean',

View File

@@ -105,6 +105,8 @@ export function applySubtitleDomainConfig(context: ResolveContext): void {
const fallbackSubtitleStyleHoverTokenColor = resolved.subtitleStyle.hoverTokenColor;
const fallbackSubtitleStyleHoverTokenBackgroundColor =
resolved.subtitleStyle.hoverTokenBackgroundColor;
const fallbackSubtitleStyleNameMatchEnabled = resolved.subtitleStyle.nameMatchEnabled;
const fallbackSubtitleStyleNameMatchColor = resolved.subtitleStyle.nameMatchColor;
const fallbackFrequencyDictionary = {
...resolved.subtitleStyle.frequencyDictionary,
};
@@ -228,6 +230,36 @@ export function applySubtitleDomainConfig(context: ResolveContext): void {
);
}
const nameMatchColor = asColor(
(src.subtitleStyle as { nameMatchColor?: unknown }).nameMatchColor,
);
const nameMatchEnabled = asBoolean(
(src.subtitleStyle as { nameMatchEnabled?: unknown }).nameMatchEnabled,
);
if (nameMatchEnabled !== undefined) {
resolved.subtitleStyle.nameMatchEnabled = nameMatchEnabled;
} else if ((src.subtitleStyle as { nameMatchEnabled?: unknown }).nameMatchEnabled !== undefined) {
resolved.subtitleStyle.nameMatchEnabled = fallbackSubtitleStyleNameMatchEnabled;
warn(
'subtitleStyle.nameMatchEnabled',
(src.subtitleStyle as { nameMatchEnabled?: unknown }).nameMatchEnabled,
resolved.subtitleStyle.nameMatchEnabled,
'Expected boolean.',
);
}
if (nameMatchColor !== undefined) {
resolved.subtitleStyle.nameMatchColor = nameMatchColor;
} else if ((src.subtitleStyle as { nameMatchColor?: unknown }).nameMatchColor !== undefined) {
resolved.subtitleStyle.nameMatchColor = fallbackSubtitleStyleNameMatchColor;
warn(
'subtitleStyle.nameMatchColor',
(src.subtitleStyle as { nameMatchColor?: unknown }).nameMatchColor,
resolved.subtitleStyle.nameMatchColor,
'Expected hex color.',
);
}
const frequencyDictionary = isObject(
(src.subtitleStyle as { frequencyDictionary?: unknown }).frequencyDictionary,
)

View File

@@ -66,6 +66,70 @@ test('subtitleStyle autoPauseVideoOnYomitanPopup falls back on invalid value', (
);
});
// Non-boolean nameMatchEnabled values fall back to the default (true) and
// produce an 'Expected boolean.' warning.
test('subtitleStyle nameMatchEnabled falls back on invalid value', () => {
  const { context, warnings } = createResolveContext({
    subtitleStyle: {
      nameMatchEnabled: 'invalid' as unknown as boolean,
    },
  });
  applySubtitleDomainConfig(context);
  assert.equal(context.resolved.subtitleStyle.nameMatchEnabled, true);
  assert.ok(
    warnings.some(
      (warning) =>
        warning.path === 'subtitleStyle.nameMatchEnabled' &&
        warning.message === 'Expected boolean.',
    ),
  );
});

// Pins the updated default banded frequency palette: fourth band is teal.
test('subtitleStyle frequencyDictionary defaults to the teal fourth band color', () => {
  const { context } = createResolveContext({});
  applySubtitleDomainConfig(context);
  assert.deepEqual(context.resolved.subtitleStyle.frequencyDictionary.bandedColors, [
    '#ed8796',
    '#f5a97f',
    '#f9e2af',
    '#8bd5ca',
    '#8aadf4',
  ]);
});

// Valid hex nameMatchColor passes through; non-hex values fall back to the
// default (#f5bde6) and record an 'Expected hex color.' warning.
test('subtitleStyle nameMatchColor accepts valid values and warns on invalid', () => {
  const valid = createResolveContext({
    subtitleStyle: {
      nameMatchColor: '#f5bde6',
    },
  });
  applySubtitleDomainConfig(valid.context);
  assert.equal(
    (valid.context.resolved.subtitleStyle as { nameMatchColor?: string }).nameMatchColor,
    '#f5bde6',
  );
  const invalid = createResolveContext({
    subtitleStyle: {
      nameMatchColor: 'pink',
    },
  });
  applySubtitleDomainConfig(invalid.context);
  assert.equal(
    (invalid.context.resolved.subtitleStyle as { nameMatchColor?: string }).nameMatchColor,
    '#f5bde6',
  );
  assert.ok(
    invalid.warnings.some(
      (warning) =>
        warning.path === 'subtitleStyle.nameMatchColor' &&
        warning.message === 'Expected hex color.',
    ),
  );
});
test('subtitleStyle frequencyDictionary.matchMode accepts valid values and warns on invalid', () => {
const valid = createResolveContext({
subtitleStyle: {

View File

@@ -42,6 +42,7 @@ function makeDeps(overrides: Partial<AppReadyRuntimeDeps> = {}) {
},
texthookerOnlyMode: false,
shouldAutoInitializeOverlayRuntimeFromConfig: () => true,
setVisibleOverlayVisible: (visible) => calls.push(`setVisibleOverlayVisible:${visible}`),
initializeOverlayRuntime: () => calls.push('initializeOverlayRuntime'),
handleInitialArgs: () => calls.push('handleInitialArgs'),
logDebug: (message) => calls.push(`debug:${message}`),
@@ -57,7 +58,11 @@ test('runAppReadyRuntime starts websocket in auto mode when plugin missing', asy
});
await runAppReadyRuntime(deps);
assert.ok(calls.includes('startSubtitleWebsocket:9001'));
assert.ok(calls.includes('setVisibleOverlayVisible:true'));
assert.ok(calls.includes('initializeOverlayRuntime'));
assert.ok(
calls.indexOf('setVisibleOverlayVisible:true') < calls.indexOf('initializeOverlayRuntime'),
);
assert.ok(calls.includes('startBackgroundWarmups'));
assert.ok(
calls.includes(

View File

@@ -116,6 +116,7 @@ export interface AppReadyRuntimeDeps {
startBackgroundWarmups: () => void;
texthookerOnlyMode: boolean;
shouldAutoInitializeOverlayRuntimeFromConfig: () => boolean;
setVisibleOverlayVisible: (visible: boolean) => void;
initializeOverlayRuntime: () => void;
handleInitialArgs: () => void;
logDebug?: (message: string) => void;
@@ -226,6 +227,7 @@ export async function runAppReadyRuntime(deps: AppReadyRuntimeDeps): Promise<voi
if (deps.texthookerOnlyMode) {
deps.log('Texthooker-only mode enabled; skipping overlay window.');
} else if (deps.shouldAutoInitializeOverlayRuntimeFromConfig()) {
deps.setVisibleOverlayVisible(true);
deps.initializeOverlayRuntime();
} else {
deps.log('Overlay runtime deferred: waiting for explicit overlay command.');

View File

@@ -24,6 +24,7 @@ interface YomitanTokenInput {
surface: string;
reading?: string;
headword?: string;
isNameMatch?: boolean;
}
function makeDepsFromYomitanTokens(
@@ -53,6 +54,7 @@ function makeDepsFromYomitanTokens(
headword: token.headword ?? token.surface,
startPos,
endPos,
isNameMatch: token.isNameMatch ?? false,
};
});
},
@@ -115,6 +117,20 @@ test('tokenizeSubtitle assigns JLPT level to parsed Yomitan tokens', async () =>
assert.equal(result.tokens?.[0]?.jlptLevel, 'N5');
});
test('tokenizeSubtitle preserves Yomitan name-match metadata on tokens', async () => {
const result = await tokenizeSubtitle(
'アクアです',
makeDepsFromYomitanTokens([
{ surface: 'アクア', reading: 'あくあ', headword: 'アクア', isNameMatch: true },
{ surface: 'です', reading: 'です', headword: 'です' },
]),
);
assert.equal(result.tokens?.length, 2);
assert.equal((result.tokens?.[0] as { isNameMatch?: boolean } | undefined)?.isNameMatch, true);
assert.equal((result.tokens?.[1] as { isNameMatch?: boolean } | undefined)?.isNameMatch, false);
});
test('tokenizeSubtitle caches JLPT lookups across repeated tokens', async () => {
let lookupCalls = 0;
const result = await tokenizeSubtitle(
@@ -1235,6 +1251,30 @@ test('tokenizeSubtitle normalizes newlines before Yomitan parse request', async
assert.equal(result.tokens, null);
});
// The tokenizer should replace zero-width separators (U+200B) with spaces in
// the text handed to Yomitan, while result.text keeps the original subtitle
// (including the U+200B and the newline) untouched.
// NOTE(review): the expected regex ends with '!' but the input string here
// has no trailing '!' — confirm the fixture/regex pair is intentional.
test('tokenizeSubtitle collapses zero-width separators before Yomitan parse request', async () => {
  let parseInput = '';
  const result = await tokenizeSubtitle(
    'キリキリと\u200bかかってこい\nこのヘナチョコ冒険者どもめが',
    makeDeps({
      getYomitanExt: () => ({ id: 'dummy-ext' }) as any,
      getYomitanParserWindow: () =>
        ({
          isDestroyed: () => false,
          webContents: {
            // Captures the injected script so the normalized text can be
            // asserted; returns null so tokenization falls through.
            executeJavaScript: async (script: string) => {
              parseInput = script;
              return null;
            },
          },
        }) as unknown as Electron.BrowserWindow,
    }),
  );
  assert.match(parseInput, /キリキリと かかってこい このヘナチョコ冒険者どもめが!/);
  assert.equal(result.text, 'キリキリと\u200bかかってこい\nこのヘナチョコ冒険者どもめが');
  assert.equal(result.tokens, null);
});
test('tokenizeSubtitle returns null tokens when Yomitan parsing is unavailable', async () => {
const result = await tokenizeSubtitle('猫です', makeDeps());

View File

@@ -44,6 +44,7 @@ export interface TokenizerServiceDeps {
getJlptLevel: (text: string) => JlptLevel | null;
getNPlusOneEnabled?: () => boolean;
getJlptEnabled?: () => boolean;
getNameMatchEnabled?: () => boolean;
getFrequencyDictionaryEnabled?: () => boolean;
getFrequencyDictionaryMatchMode?: () => FrequencyDictionaryMatchMode;
getFrequencyRank?: FrequencyDictionaryLookup;
@@ -73,6 +74,7 @@ export interface TokenizerDepsRuntimeOptions {
getJlptLevel: (text: string) => JlptLevel | null;
getNPlusOneEnabled?: () => boolean;
getJlptEnabled?: () => boolean;
getNameMatchEnabled?: () => boolean;
getFrequencyDictionaryEnabled?: () => boolean;
getFrequencyDictionaryMatchMode?: () => FrequencyDictionaryMatchMode;
getFrequencyRank?: FrequencyDictionaryLookup;
@@ -85,6 +87,7 @@ export interface TokenizerDepsRuntimeOptions {
interface TokenizerAnnotationOptions {
nPlusOneEnabled: boolean;
jlptEnabled: boolean;
nameMatchEnabled: boolean;
frequencyEnabled: boolean;
frequencyMatchMode: FrequencyDictionaryMatchMode;
minSentenceWordsForNPlusOne: number | undefined;
@@ -106,6 +109,7 @@ const DEFAULT_ANNOTATION_POS1_EXCLUSIONS = resolveAnnotationPos1ExclusionSet(
const DEFAULT_ANNOTATION_POS2_EXCLUSIONS = resolveAnnotationPos2ExclusionSet(
DEFAULT_ANNOTATION_POS2_EXCLUSION_CONFIG,
);
const INVISIBLE_SEPARATOR_PATTERN = /[\u200b\u2060\ufeff]/g;
function getKnownWordLookup(
deps: TokenizerServiceDeps,
@@ -189,6 +193,7 @@ export function createTokenizerDepsRuntime(
getJlptLevel: options.getJlptLevel,
getNPlusOneEnabled: options.getNPlusOneEnabled,
getJlptEnabled: options.getJlptEnabled,
getNameMatchEnabled: options.getNameMatchEnabled,
getFrequencyDictionaryEnabled: options.getFrequencyDictionaryEnabled,
getFrequencyDictionaryMatchMode: options.getFrequencyDictionaryMatchMode ?? (() => 'headword'),
getFrequencyRank: options.getFrequencyRank,
@@ -300,6 +305,7 @@ function normalizeSelectedYomitanTokens(tokens: MergedToken[]): MergedToken[] {
isMerged: token.isMerged ?? true,
isKnown: token.isKnown ?? false,
isNPlusOneTarget: token.isNPlusOneTarget ?? false,
isNameMatch: token.isNameMatch ?? false,
reading: normalizeYomitanMergedReading(token),
}));
}
@@ -459,6 +465,7 @@ function getAnnotationOptions(deps: TokenizerServiceDeps): TokenizerAnnotationOp
return {
nPlusOneEnabled: deps.getNPlusOneEnabled?.() !== false,
jlptEnabled: deps.getJlptEnabled?.() !== false,
nameMatchEnabled: deps.getNameMatchEnabled?.() !== false,
frequencyEnabled: deps.getFrequencyDictionaryEnabled?.() !== false,
frequencyMatchMode: deps.getFrequencyDictionaryMatchMode?.() ?? 'headword',
minSentenceWordsForNPlusOne: deps.getMinSentenceWordsForNPlusOne?.(),
@@ -472,7 +479,9 @@ async function parseWithYomitanInternalParser(
deps: TokenizerServiceDeps,
options: TokenizerAnnotationOptions,
): Promise<MergedToken[] | null> {
const selectedTokens = await requestYomitanScanTokens(text, deps, logger);
const selectedTokens = await requestYomitanScanTokens(text, deps, logger, {
includeNameMatchMetadata: options.nameMatchEnabled,
});
if (!selectedTokens || selectedTokens.length === 0) {
return null;
}
@@ -488,6 +497,7 @@ async function parseWithYomitanInternalParser(
isMerged: true,
isKnown: false,
isNPlusOneTarget: false,
isNameMatch: token.isNameMatch ?? false,
}),
),
);
@@ -563,7 +573,11 @@ export async function tokenizeSubtitle(
return { text, tokens: null };
}
const tokenizeText = displayText.replace(/\n/g, ' ').replace(/\s+/g, ' ').trim();
const tokenizeText = displayText
.replace(INVISIBLE_SEPARATOR_PATTERN, ' ')
.replace(/\n/g, ' ')
.replace(/\s+/g, ' ')
.trim();
const annotationOptions = getAnnotationOptions(deps);
const yomitanTokens = await parseWithYomitanInternalParser(tokenizeText, deps, annotationOptions);

View File

@@ -3,12 +3,12 @@ import * as fs from 'fs';
import * as os from 'os';
import * as path from 'path';
import test from 'node:test';
import * as vm from 'node:vm';
import {
getYomitanDictionaryInfo,
importYomitanDictionaryFromZip,
deleteYomitanDictionaryByTitle,
removeYomitanDictionarySettings,
requestYomitanParseResults,
requestYomitanScanTokens,
requestYomitanTermFrequencies,
syncYomitanDefaultAnkiServer,
@@ -40,6 +40,40 @@ function createDeps(
};
}
/**
 * Evaluates an injected Yomitan content script inside a fresh VM context.
 *
 * `chrome.runtime.sendMessage` is stubbed so that `handler` produces the
 * response payload; when the handler throws, the callback receives
 * `{ error: { message } }` instead. Resolves with the script's completion
 * value.
 */
async function runInjectedYomitanScript(
  script: string,
  handler: (action: string, params: unknown) => unknown,
): Promise<unknown> {
  const sendMessage = (
    payload: { action?: string; params?: unknown },
    callback: (response: { result?: unknown; error?: { message?: string } }) => void,
  ): void => {
    try {
      const result = handler(payload.action ?? '', payload.params);
      callback({ result });
    } catch (error) {
      const message = (error as Error).message;
      callback({ error: { message } });
    }
  };
  const sandbox = {
    chrome: {
      runtime: {
        lastError: null,
        sendMessage,
      },
    },
    // Globals the injected helper scripts rely on; a bare VM context has none.
    Array,
    Error,
    JSON,
    Map,
    Math,
    Number,
    Object,
    Promise,
    RegExp,
    Set,
    String,
  };
  return await vm.runInNewContext(script, sandbox);
}
test('syncYomitanDefaultAnkiServer updates default profile server when script reports update', async () => {
let scriptValue = '';
const deps = createDeps(async (script) => {
@@ -451,6 +485,164 @@ test('requestYomitanScanTokens uses left-to-right termsFind scanning instead of
assert.match(scannerScript ?? '', /deinflect:\s*true/);
});
// Scan tokens reported by the injected scanner must surface the isNameMatch
// flag derived from SubMiner character dictionary entries.
test('requestYomitanScanTokens marks tokens backed by SubMiner character dictionary entries', async () => {
  const deps = createDeps(async (script) => {
    if (script.includes('optionsGetFull')) {
      return {
        profileCurrent: 0,
        profiles: [
          {
            options: {
              scanning: { length: 40 },
            },
          },
        ],
      };
    }
    // Any other injected script resolves to the scanner's token payload.
    return [
      {
        surface: 'アクア',
        reading: 'あくあ',
        headword: 'アクア',
        startPos: 0,
        endPos: 3,
        isNameMatch: true,
      },
      {
        surface: 'です',
        reading: 'です',
        headword: 'です',
        startPos: 3,
        endPos: 5,
        isNameMatch: false,
      },
    ];
  });
  const result = await requestYomitanScanTokens('アクアです', deps, {
    error: () => undefined,
  });
  assert.equal(result?.length, 2);
  assert.equal((result?.[0] as { isNameMatch?: boolean } | undefined)?.isNameMatch, true);
  assert.equal((result?.[1] as { isNameMatch?: boolean } | undefined)?.isNameMatch, false);
});

// When name-match metadata is disabled, tokens carry no isNameMatch key and
// the generated scanner script hard-codes the flag off.
test('requestYomitanScanTokens skips name-match work when disabled', async () => {
  let scannerScript = '';
  const deps = createDeps(async (script) => {
    if (script.includes('termsFind')) {
      scannerScript = script;
    }
    if (script.includes('optionsGetFull')) {
      return {
        profileCurrent: 0,
        profiles: [
          {
            options: {
              scanning: { length: 40 },
            },
          },
        ],
      };
    }
    return [
      {
        surface: 'アクア',
        reading: 'あくあ',
        headword: 'アクア',
        startPos: 0,
        endPos: 3,
      },
    ];
  });
  const result = await requestYomitanScanTokens(
    'アクア',
    deps,
    { error: () => undefined },
    { includeNameMatchMetadata: false },
  );
  assert.equal(result?.length, 1);
  assert.equal((result?.[0] as { isNameMatch?: boolean } | undefined)?.isNameMatch, undefined);
  assert.match(scannerScript, /const includeNameMatchMetadata = false;/);
});

// Executes the captured scanner script in a VM to prove that a SubMiner
// dictionary alias present only on an entry's definitions (not the entry
// itself) still marks the grouped token as a name match.
test('requestYomitanScanTokens marks grouped entries when SubMiner dictionary alias only exists on definitions', async () => {
  let scannerScript = '';
  const deps = createDeps(async (script) => {
    if (script.includes('termsFind')) {
      scannerScript = script;
      return [];
    }
    if (script.includes('optionsGetFull')) {
      return {
        profileCurrent: 0,
        profiles: [
          {
            options: {
              scanning: { length: 40 },
            },
          },
        ],
      };
    }
    return null;
  });
  await requestYomitanScanTokens(
    'カズマ',
    deps,
    { error: () => undefined },
    { includeNameMatchMetadata: true },
  );
  assert.match(scannerScript, /getPreferredHeadword/);
  // Re-run the captured script with a stubbed termsFind backend.
  const result = await runInjectedYomitanScript(scannerScript, (action, params) => {
    if (action === 'termsFind') {
      const text = (params as { text?: string } | undefined)?.text;
      if (text === 'カズマ') {
        return {
          originalTextLength: 3,
          dictionaryEntries: [
            {
              dictionaryAlias: '',
              headwords: [
                {
                  term: 'カズマ',
                  reading: 'かずま',
                  sources: [{ originalText: 'カズマ', isPrimary: true, matchType: 'exact' }],
                },
              ],
              definitions: [
                { dictionary: 'JMdict', dictionaryAlias: 'JMdict' },
                {
                  dictionary: 'SubMiner Character Dictionary (AniList 130298)',
                  dictionaryAlias: 'SubMiner Character Dictionary (AniList 130298)',
                },
              ],
            },
          ],
        };
      }
      return { originalTextLength: 0, dictionaryEntries: [] };
    }
    throw new Error(`unexpected action: ${action}`);
  });
  assert.equal(Array.isArray(result), true);
  assert.equal((result as { length?: number } | null)?.length, 1);
  assert.equal((result as Array<{ surface?: string }>)[0]?.surface, 'カズマ');
  assert.equal((result as Array<{ headword?: string }>)[0]?.headword, 'カズマ');
  assert.equal((result as Array<{ startPos?: number }>)[0]?.startPos, 0);
  assert.equal((result as Array<{ endPos?: number }>)[0]?.endPos, 3);
  assert.equal((result as Array<{ isNameMatch?: boolean }>)[0]?.isNameMatch, true);
});
test('getYomitanDictionaryInfo requests dictionary info via backend action', async () => {
let scriptValue = '';
const deps = createDeps(async (script) => {

View File

@@ -45,6 +45,7 @@ export interface YomitanScanToken {
headword: string;
startPos: number;
endPos: number;
isNameMatch?: boolean;
}
interface YomitanProfileMetadata {
@@ -75,7 +76,8 @@ function isScanTokenArray(value: unknown): value is YomitanScanToken[] {
typeof entry.reading === 'string' &&
typeof entry.headword === 'string' &&
typeof entry.startPos === 'number' &&
typeof entry.endPos === 'number',
typeof entry.endPos === 'number' &&
(entry.isNameMatch === undefined || typeof entry.isNameMatch === 'boolean'),
)
);
}
@@ -772,24 +774,92 @@ const YOMITAN_SCANNING_HELPERS = String.raw`
return segments;
}
function getPreferredHeadword(dictionaryEntries, token) {
for (const dictionaryEntry of dictionaryEntries || []) {
for (const headword of dictionaryEntry.headwords || []) {
const validSources = [];
function appendDictionaryNames(target, value) {
if (!value || typeof value !== 'object') {
return;
}
const candidates = [
value.dictionary,
value.dictionaryName,
value.name,
value.title,
value.dictionaryTitle,
value.dictionaryAlias
];
for (const candidate of candidates) {
if (typeof candidate === 'string' && candidate.trim().length > 0) {
target.push(candidate.trim());
}
}
}
// Gather every dictionary-name string attached to a dictionary entry:
// the entry itself first, then its definitions, frequencies, and
// pronunciations, preserving that order.
function getDictionaryEntryNames(entry) {
  const names = [];
  const carriers = [
    entry,
    ...(entry?.definitions || []),
    ...(entry?.frequencies || []),
    ...(entry?.pronunciations || [])
  ];
  for (const carrier of carriers) {
    appendDictionaryNames(names, carrier);
  }
  return names;
}
// True when name-match metadata is enabled and any dictionary name on the
// entry comes from a "SubMiner Character Dictionary" variant.
function isNameDictionaryEntry(entry) {
  if (!includeNameMatchMetadata) {
    return false;
  }
  if (!entry || typeof entry !== 'object') {
    return false;
  }
  const names = getDictionaryEntryNames(entry);
  return names.some((name) => name.startsWith("SubMiner Character Dictionary"));
}
function hasExactPrimarySource(headword, token) {
for (const src of headword.sources || []) {
if (src.originalText !== token) { continue; }
if (!src.isPrimary) { continue; }
if (src.matchType !== 'exact') { continue; }
validSources.push(src);
return true;
}
if (validSources.length > 0) { return {term: headword.term, reading: headword.reading}; }
return false;
}
let matchedNameDictionary = false;
if (includeNameMatchMetadata) {
for (const dictionaryEntry of dictionaryEntries || []) {
if (!isNameDictionaryEntry(dictionaryEntry)) { continue; }
for (const headword of dictionaryEntry.headwords || []) {
if (!hasExactPrimarySource(headword, token)) { continue; }
matchedNameDictionary = true;
break;
}
if (matchedNameDictionary) { break; }
}
}
for (const dictionaryEntry of dictionaryEntries || []) {
for (const headword of dictionaryEntry.headwords || []) {
if (!hasExactPrimarySource(headword, token)) { continue; }
return {
term: headword.term,
reading: headword.reading,
isNameMatch: matchedNameDictionary || isNameDictionaryEntry(dictionaryEntry)
};
}
}
const fallback = dictionaryEntries?.[0]?.headwords?.[0];
return fallback ? {term: fallback.term, reading: fallback.reading} : null;
return fallback
? {
term: fallback.term,
reading: fallback.reading,
isNameMatch: matchedNameDictionary || isNameDictionaryEntry(dictionaryEntries?.[0])
}
: null;
}
`;
function buildYomitanScanningScript(text: string, profileIndex: number, scanLength: number): string {
function buildYomitanScanningScript(
text: string,
profileIndex: number,
scanLength: number,
includeNameMatchMetadata: boolean,
): string {
return `
(async () => {
const invoke = (action, params) =>
@@ -811,6 +881,7 @@ function buildYomitanScanningScript(text: string, profileIndex: number, scanLeng
});
});
${YOMITAN_SCANNING_HELPERS}
const includeNameMatchMetadata = ${includeNameMatchMetadata ? 'true' : 'false'};
const text = ${JSON.stringify(text)};
const details = {matchType: "exact", deinflect: true};
const tokens = [];
@@ -834,6 +905,7 @@ ${YOMITAN_SCANNING_HELPERS}
headword: preferredHeadword.term,
startPos: i,
endPos: i + originalTextLength,
isNameMatch: includeNameMatchMetadata && preferredHeadword.isNameMatch === true,
});
i += originalTextLength;
continue;
@@ -944,6 +1016,9 @@ export async function requestYomitanScanTokens(
text: string,
deps: YomitanParserRuntimeDeps,
logger: LoggerLike,
options?: {
includeNameMatchMetadata?: boolean;
},
): Promise<YomitanScanToken[] | null> {
const yomitanExt = deps.getYomitanExt();
if (!text || !yomitanExt) {
@@ -962,7 +1037,12 @@ export async function requestYomitanScanTokens(
try {
const rawResult = await parserWindow.webContents.executeJavaScript(
buildYomitanScanningScript(text, profileIndex, scanLength),
buildYomitanScanningScript(
text,
profileIndex,
scanLength,
options?.includeNameMatchMetadata === true,
),
true,
);
if (isScanTokenArray(rawResult)) {

View File

@@ -0,0 +1,70 @@
import test from 'node:test';
import assert from 'node:assert/strict';
import fs from 'node:fs';
import path from 'node:path';
const DEAD_MODULE_PATHS = [
'src/translators/index.ts',
'src/subsync/engines.ts',
'src/subtitle/pipeline.ts',
'src/subtitle/stages/merge.ts',
'src/subtitle/stages/normalize.ts',
'src/subtitle/stages/normalize.test.ts',
'src/subtitle/stages/tokenize.ts',
'src/tokenizers/index.ts',
'src/token-mergers/index.ts',
] as const;
const FORBIDDEN_IMPORT_PATTERNS = [
/from ['"]\.\.?\/tokenizers['"]/,
/from ['"]\.\.?\/token-mergers['"]/,
/from ['"]\.\.?\/subtitle\/pipeline['"]/,
/from ['"]\.\.?\/subsync\/engines['"]/,
/from ['"]\.\.?\/translators['"]/,
] as const;
// Read a file addressed relative to the repository root (cwd) and return
// its UTF-8 contents.
function readWorkspaceFile(relativePath: string): string {
  const absolutePath = path.join(process.cwd(), relativePath);
  return fs.readFileSync(absolutePath, 'utf8');
}
// Walk rootDir (relative to cwd) and return every .ts/.tsx file beneath it
// as sorted, forward-slash, cwd-relative paths.
function collectSourceFiles(rootDir: string): string[] {
  const workspaceRoot = process.cwd();
  // Iterative traversal: directories still to visit.
  const pending: string[] = [path.join(workspaceRoot, rootDir)];
  const found: string[] = [];
  while (pending.length > 0) {
    const dir = pending.pop()!;
    for (const entry of fs.readdirSync(dir, { withFileTypes: true })) {
      const entryPath = path.join(dir, entry.name);
      if (entry.isDirectory()) {
        pending.push(entryPath);
      } else if (entryPath.endsWith('.ts') || entryPath.endsWith('.tsx')) {
        // Normalize Windows separators so assertions are platform-stable.
        found.push(path.relative(workspaceRoot, entryPath).replaceAll('\\', '/'));
      }
    }
  }
  return found.sort();
}
// Guard against the deleted modules being resurrected by a bad merge/revert.
test('dead registry and pipeline modules stay removed from the repository', () => {
  for (const relativePath of DEAD_MODULE_PATHS) {
    const absolutePath = path.join(process.cwd(), relativePath);
    const exists = fs.existsSync(absolutePath);
    assert.equal(exists, false, `${relativePath} should stay deleted`);
  }
});
// Ensure no live module re-imports the removed registry/pipeline modules.
test('live source tree no longer imports dead registry and pipeline modules', () => {
  const sourceFiles = collectSourceFiles('src');
  for (const relativePath of sourceFiles) {
    const contents = readWorkspaceFile(relativePath);
    for (const pattern of FORBIDDEN_IMPORT_PATTERNS) {
      assert.doesNotMatch(contents, pattern, `${relativePath} should not import ${pattern.source}`);
    }
  }
});

View File

@@ -92,8 +92,6 @@ import type {
SecondarySubMode,
SubtitleData,
SubtitlePosition,
SubsyncManualRunRequest,
SubsyncResult,
WindowGeometry,
} from './types';
import { AnkiIntegration } from './anki-integration';
@@ -116,36 +114,15 @@ import {
failStartupFromConfig,
} from './main/config-validation';
import {
buildAnilistAttemptKey,
buildAnilistSetupUrl,
consumeAnilistSetupCallbackUrl,
createAnilistStateRuntime,
createBuildGetAnilistMediaGuessRuntimeStateMainDepsHandler,
createBuildGetCurrentAnilistMediaKeyMainDepsHandler,
createBuildMaybeProbeAnilistDurationMainDepsHandler,
createBuildMaybeRunAnilistPostWatchUpdateMainDepsHandler,
createBuildOpenAnilistSetupWindowMainDepsHandler,
createBuildProcessNextAnilistRetryUpdateMainDepsHandler,
createBuildRefreshAnilistClientSecretStateMainDepsHandler,
createBuildResetAnilistMediaGuessStateMainDepsHandler,
createBuildResetAnilistMediaTrackingMainDepsHandler,
createBuildSetAnilistMediaGuessRuntimeStateMainDepsHandler,
createEnsureAnilistMediaGuessHandler,
createGetAnilistMediaGuessRuntimeStateHandler,
createGetCurrentAnilistMediaKeyHandler,
createMaybeFocusExistingAnilistSetupWindowHandler,
createMaybeProbeAnilistDurationHandler,
createMaybeRunAnilistPostWatchUpdateHandler,
createOpenAnilistSetupWindowHandler,
createProcessNextAnilistRetryUpdateHandler,
createRefreshAnilistClientSecretStateHandler,
createResetAnilistMediaGuessStateHandler,
createResetAnilistMediaTrackingHandler,
createSetAnilistMediaGuessRuntimeStateHandler,
findAnilistSetupDeepLinkArgvUrl,
isAnilistTrackingEnabled,
loadAnilistManualTokenEntry,
loadAnilistSetupFallback,
openAnilistSetupInBrowser,
rememberAnilistAttemptedUpdateKey,
} from './main/runtime/domains/anilist';
@@ -153,50 +130,9 @@ import {
createApplyJellyfinMpvDefaultsHandler,
createBuildApplyJellyfinMpvDefaultsMainDepsHandler,
createBuildGetDefaultSocketPathMainDepsHandler,
createEnsureMpvConnectedForJellyfinPlaybackHandler,
createBuildEnsureMpvConnectedForJellyfinPlaybackMainDepsHandler,
createGetDefaultSocketPathHandler,
createGetJellyfinClientInfoHandler,
createBuildGetJellyfinClientInfoMainDepsHandler,
createHandleJellyfinAuthCommands,
createBuildHandleJellyfinAuthCommandsMainDepsHandler,
createHandleJellyfinListCommands,
createBuildHandleJellyfinListCommandsMainDepsHandler,
createHandleJellyfinPlayCommand,
createBuildHandleJellyfinPlayCommandMainDepsHandler,
createHandleJellyfinRemoteAnnounceCommand,
createBuildHandleJellyfinRemoteAnnounceCommandMainDepsHandler,
createHandleJellyfinRemotePlay,
createBuildHandleJellyfinRemotePlayMainDepsHandler,
createHandleJellyfinRemotePlaystate,
createBuildHandleJellyfinRemotePlaystateMainDepsHandler,
createHandleJellyfinRemoteGeneralCommand,
createBuildHandleJellyfinRemoteGeneralCommandMainDepsHandler,
createLaunchMpvIdleForJellyfinPlaybackHandler,
createBuildLaunchMpvIdleForJellyfinPlaybackMainDepsHandler,
createPlayJellyfinItemInMpvHandler,
createBuildPlayJellyfinItemInMpvMainDepsHandler,
createPreloadJellyfinExternalSubtitlesHandler,
createBuildPreloadJellyfinExternalSubtitlesMainDepsHandler,
createReportJellyfinRemoteProgressHandler,
createBuildReportJellyfinRemoteProgressMainDepsHandler,
createReportJellyfinRemoteStoppedHandler,
createBuildReportJellyfinRemoteStoppedMainDepsHandler,
createStartJellyfinRemoteSessionHandler,
createBuildStartJellyfinRemoteSessionMainDepsHandler,
createStopJellyfinRemoteSessionHandler,
createBuildStopJellyfinRemoteSessionMainDepsHandler,
createRunJellyfinCommandHandler,
createBuildRunJellyfinCommandMainDepsHandler,
createWaitForMpvConnectedHandler,
createBuildWaitForMpvConnectedMainDepsHandler,
createOpenJellyfinSetupWindowHandler,
createBuildOpenJellyfinSetupWindowMainDepsHandler,
createGetResolvedJellyfinConfigHandler,
createBuildGetResolvedJellyfinConfigMainDepsHandler,
parseJellyfinSetupSubmissionUrl,
buildJellyfinSetupFormHtml,
createMaybeFocusExistingJellyfinSetupWindowHandler,
parseJellyfinSetupSubmissionUrl,
} from './main/runtime/domains/jellyfin';
import type { ActiveJellyfinRemotePlaybackState } from './main/runtime/domains/jellyfin';
import { getConfiguredJellyfinSession } from './main/runtime/domains/jellyfin';
@@ -226,7 +162,6 @@ import {
createBuildEnsureOverlayWindowLevelMainDepsHandler,
createBuildUpdateVisibleOverlayBoundsMainDepsHandler,
createOverlayWindowRuntimeHandlers,
createOverlayRuntimeBootstrapHandlers,
createTrayRuntimeHandlers,
createOverlayVisibilityRuntime,
createBroadcastRuntimeOptionsChangedHandler,
@@ -268,25 +203,11 @@ import {
createConfigDerivedRuntime,
appendClipboardVideoToQueueRuntime,
createMainSubsyncRuntime,
createLaunchBackgroundWarmupTaskHandler,
createStartBackgroundWarmupsHandler,
createBuildLaunchBackgroundWarmupTaskMainDepsHandler,
createBuildStartBackgroundWarmupsMainDepsHandler,
} from './main/runtime/domains/startup';
import {
createBuildBindMpvMainEventHandlersMainDepsHandler,
createBuildMpvClientRuntimeServiceFactoryDepsHandler,
createMpvClientRuntimeServiceFactory,
createBindMpvMainEventHandlersHandler,
createBuildTokenizerDepsMainHandler,
createCreateMecabTokenizerAndCheckMainHandler,
createPrewarmSubtitleDictionariesMainHandler,
createUpdateMpvSubtitleRenderMetricsHandler,
createBuildUpdateMpvSubtitleRenderMetricsMainDepsHandler,
createMpvOsdRuntimeHandlers,
createCycleSecondarySubModeRuntimeHandler,
} from './main/runtime/domains/mpv';
import type { MpvClientRuntimeServiceOptions } from './main/runtime/domains/mpv';
import {
createBuildCopyCurrentSubtitleMainDepsHandler,
createBuildHandleMineSentenceDigitMainDepsHandler,
@@ -324,7 +245,6 @@ import {
MpvIpcClient,
SubtitleWebSocket,
Texthooker,
DEFAULT_MPV_SUBTITLE_RENDER_METRICS,
applyMpvSubtitleRenderMetricsPatch,
authenticateWithPasswordRuntime,
broadcastRuntimeOptionsChangedRuntime,
@@ -424,7 +344,6 @@ import { createCharacterDictionaryRuntimeService } from './main/character-dictio
import { createCharacterDictionaryAutoSyncRuntimeService } from './main/runtime/character-dictionary-auto-sync';
import {
type AnilistMediaGuessRuntimeState,
type AppState,
type StartupState,
applyStartupState,
createAppState,
@@ -1455,13 +1374,8 @@ function shouldInitializeMecabForAnnotations(): boolean {
const {
getResolvedJellyfinConfig,
getJellyfinClientInfo,
reportJellyfinRemoteProgress,
reportJellyfinRemoteStopped,
handleJellyfinRemotePlay,
handleJellyfinRemotePlaystate,
handleJellyfinRemoteGeneralCommand,
playJellyfinItemInMpv,
startJellyfinRemoteSession,
stopJellyfinRemoteSession,
runJellyfinCommand,
@@ -2184,7 +2098,7 @@ const ensureImmersionTrackerStarted = (): void => {
createImmersionTrackerStartup();
};
const { reloadConfig: reloadConfigHandler, appReadyRuntimeRunner } = composeAppReadyRuntime({
const { appReadyRuntimeRunner } = composeAppReadyRuntime({
reloadConfigMainDeps: {
reloadConfigStrict: () => configService.reloadConfigStrict(),
logInfo: (message) => appLogger.logInfo(message),
@@ -2270,6 +2184,7 @@ const { reloadConfig: reloadConfigHandler, appReadyRuntimeRunner } = composeAppR
appState.backgroundMode
? false
: configDerivedRuntime.shouldAutoInitializeOverlayRuntimeFromConfig(),
setVisibleOverlayVisible: (visible: boolean) => setVisibleOverlayVisible(visible),
initializeOverlayRuntime: () => initializeOverlayRuntime(),
handleInitialArgs: () => handleInitialArgs(),
shouldSkipHeavyStartup: () =>
@@ -2288,7 +2203,7 @@ const { reloadConfig: reloadConfigHandler, appReadyRuntimeRunner } = composeAppR
immersionTrackerStartupMainDeps,
});
const { appLifecycleRuntimeRunner, runAndApplyStartupState } =
const { runAndApplyStartupState } =
runtimeRegistry.startup.createStartupRuntimeHandlers<
CliArgs,
StartupState,
@@ -2386,7 +2301,6 @@ function handleInitialArgs(): void {
}
const {
bindMpvClientEventHandlers,
createMpvClientRuntimeService: createMpvClientRuntimeServiceHandler,
updateMpvSubtitleRenderMetrics: updateMpvSubtitleRenderMetricsHandler,
tokenizeSubtitle,
@@ -2522,6 +2436,8 @@ const {
'subtitle.annotation.jlpt',
getResolvedConfig().subtitleStyle.enableJlpt,
),
getCharacterDictionaryEnabled: () => getResolvedConfig().anilist.characterDictionary.enabled,
getNameMatchEnabled: () => getResolvedConfig().subtitleStyle.nameMatchEnabled,
getFrequencyDictionaryEnabled: () =>
getRuntimeBooleanOption(
'subtitle.annotation.frequency',
@@ -2758,10 +2674,6 @@ async function syncYomitanDefaultProfileAnkiServer(): Promise<void> {
}
}
function createOverlayWindow(kind: 'visible' | 'modal'): BrowserWindow {
return createOverlayWindowHandler(kind);
}
function createModalWindow(): BrowserWindow {
const existingWindow = overlayManager.getModalWindow();
if (existingWindow && !existingWindow.isDestroyed()) {
@@ -2775,13 +2687,6 @@ function createModalWindow(): BrowserWindow {
function createMainWindow(): BrowserWindow {
return createMainWindowHandler();
}
function resolveTrayIconPath(): string | null {
return resolveTrayIconPathHandler();
}
function buildTrayMenu(): Menu {
return buildTrayMenuHandler();
}
function ensureTray(): void {
ensureTrayHandler();
@@ -2808,8 +2713,6 @@ const {
startPendingMultiCopy,
cancelPendingMineSentenceMultiple,
startPendingMineSentenceMultiple,
registerOverlayShortcuts,
unregisterOverlayShortcuts,
syncOverlayShortcuts,
refreshOverlayShortcuts,
} = composeShortcutRuntimes({
@@ -2848,7 +2751,7 @@ const {
},
});
const { appendToMpvLog, flushMpvLog, showMpvOsd } = createMpvOsdRuntimeHandlers({
const { flushMpvLog, showMpvOsd } = createMpvOsdRuntimeHandlers({
appendToMpvLogMainDeps: {
logPath: DEFAULT_MPV_LOG_PATH,
dirname: (targetPath) => path.dirname(targetPath),
@@ -3005,7 +2908,6 @@ const {
setVisibleOverlayVisible: setVisibleOverlayVisibleHandler,
toggleVisibleOverlay: toggleVisibleOverlayHandler,
setOverlayVisible: setOverlayVisibleHandler,
toggleOverlay: toggleOverlayHandler,
} = createOverlayVisibilityRuntime({
setVisibleOverlayVisibleDeps: {
setVisibleOverlayVisibleCore,
@@ -3065,11 +2967,7 @@ const shiftSubtitleDelayToAdjacentCueHandler = createShiftSubtitleDelayToAdjacen
showMpvOsd: (text) => showMpvOsd(text),
});
const {
handleMpvCommandFromIpc: handleMpvCommandFromIpcHandler,
runSubsyncManualFromIpc: runSubsyncManualFromIpcHandler,
registerIpcRuntimeHandlers,
} = composeIpcRuntimeHandlers({
const { registerIpcRuntimeHandlers } = composeIpcRuntimeHandlers({
mpvCommandMainDeps: {
triggerSubsyncFromConfig: () => triggerSubsyncFromConfig(),
openRuntimeOptionsPalette: () => openRuntimeOptionsPalette(),
@@ -3224,11 +3122,8 @@ const createCliCommandContextHandler = createCliCommandContextFactory({
logWarn: (message: string) => logger.warn(message),
logError: (message: string, err: unknown) => logger.error(message, err),
});
const {
createOverlayWindow: createOverlayWindowHandler,
createMainWindow: createMainWindowHandler,
createModalWindow: createModalWindowHandler,
} = createOverlayWindowRuntimeHandlers<BrowserWindow>({
const { createMainWindow: createMainWindowHandler, createModalWindow: createModalWindowHandler } =
createOverlayWindowRuntimeHandlers<BrowserWindow>({
createOverlayWindowDeps: {
createOverlayWindowCore: (kind, options) => createOverlayWindowCore(kind, options),
isDev,
@@ -3250,12 +3145,8 @@ const {
setMainWindow: (window) => overlayManager.setMainWindow(window),
setModalWindow: (window) => overlayManager.setModalWindow(window),
});
const {
resolveTrayIconPath: resolveTrayIconPathHandler,
buildTrayMenu: buildTrayMenuHandler,
ensureTray: ensureTrayHandler,
destroyTray: destroyTrayHandler,
} = createTrayRuntimeHandlers({
const { ensureTray: ensureTrayHandler, destroyTray: destroyTrayHandler } =
createTrayRuntimeHandlers({
resolveTrayIconPathDeps: {
resolveTrayIconPathRuntime,
platform: process.platform,
@@ -3436,25 +3327,10 @@ function setOverlayVisible(visible: boolean): void {
setOverlayVisibleHandler(visible);
syncOverlayMpvSubtitleSuppression();
}
function toggleOverlay(): void {
if (!overlayManager.getVisibleOverlayVisible()) {
void ensureOverlayMpvSubtitlesHidden();
}
toggleOverlayHandler();
syncOverlayMpvSubtitleSuppression();
}
function handleOverlayModalClosed(modal: OverlayHostedModal): void {
handleOverlayModalClosedHandler(modal);
}
function handleMpvCommandFromIpc(command: (string | number)[]): void {
handleMpvCommandFromIpcHandler(command);
}
async function runSubsyncManualFromIpc(request: SubsyncManualRunRequest): Promise<SubsyncResult> {
return runSubsyncManualFromIpcHandler(request) as Promise<SubsyncResult>;
}
function appendClipboardVideoToQueue(): { ok: boolean; message: string } {
return appendClipboardVideoToQueueHandler();
}

View File

@@ -43,6 +43,7 @@ export interface AppReadyRuntimeDepsFactoryInput {
startBackgroundWarmups: AppReadyRuntimeDeps['startBackgroundWarmups'];
texthookerOnlyMode: AppReadyRuntimeDeps['texthookerOnlyMode'];
shouldAutoInitializeOverlayRuntimeFromConfig: AppReadyRuntimeDeps['shouldAutoInitializeOverlayRuntimeFromConfig'];
setVisibleOverlayVisible: AppReadyRuntimeDeps['setVisibleOverlayVisible'];
initializeOverlayRuntime: AppReadyRuntimeDeps['initializeOverlayRuntime'];
handleInitialArgs: AppReadyRuntimeDeps['handleInitialArgs'];
onCriticalConfigErrors?: AppReadyRuntimeDeps['onCriticalConfigErrors'];
@@ -99,6 +100,7 @@ export function createAppReadyRuntimeDeps(
texthookerOnlyMode: params.texthookerOnlyMode,
shouldAutoInitializeOverlayRuntimeFromConfig:
params.shouldAutoInitializeOverlayRuntimeFromConfig,
setVisibleOverlayVisible: params.setVisibleOverlayVisible,
initializeOverlayRuntime: params.initializeOverlayRuntime,
handleInitialArgs: params.handleInitialArgs,
onCriticalConfigErrors: params.onCriticalConfigErrors,

View File

@@ -111,7 +111,7 @@ test('generateForCurrentMedia emits structured-content glossary so image stays w
node: {
id: 123,
description:
'__Race:__ Human Alexia Midgar is the second princess of the Kingdom of Midgar.',
'__Race:__ Human\nAlexia Midgar is the second princess of the Kingdom of Midgar.',
image: {
large: 'https://example.com/alexia.png',
medium: null,
@@ -160,8 +160,19 @@ test('generateForCurrentMedia emits structured-content glossary so image stays w
});
const result = await runtime.generateForCurrentMedia();
const termBank = JSON.parse(readStoredZipEntry(result.zipPath, 'term_bank_1.json').toString('utf8')) as Array<
[string, string, string, string, number, Array<string | Record<string, unknown>>, number, string]
const termBank = JSON.parse(
readStoredZipEntry(result.zipPath, 'term_bank_1.json').toString('utf8'),
) as Array<
[
string,
string,
string,
string,
number,
Array<string | Record<string, unknown>>,
number,
string,
]
>;
const alexia = termBank.find(([term]) => term === 'アレクシア');
@@ -171,24 +182,66 @@ test('generateForCurrentMedia emits structured-content glossary so image stays w
const entry = glossary[0] as {
type: string;
content: unknown[];
content: { tag: string; content: Array<Record<string, unknown>> };
};
assert.equal(entry.type, 'structured-content');
assert.equal(Array.isArray(entry.content), true);
const image = entry.content[0] as Record<string, unknown>;
const wrapper = entry.content;
assert.equal(wrapper.tag, 'div');
const children = wrapper.content;
const nameDiv = children[0] as { tag: string; content: string };
assert.equal(nameDiv.tag, 'div');
assert.equal(nameDiv.content, 'アレクシア・ミドガル');
const secondaryNameDiv = children[1] as { tag: string; content: string };
assert.equal(secondaryNameDiv.tag, 'div');
assert.equal(secondaryNameDiv.content, 'Alexia Midgar');
const imageWrap = children[2] as { tag: string; content: Record<string, unknown> };
assert.equal(imageWrap.tag, 'div');
const image = imageWrap.content as Record<string, unknown>;
assert.equal(image.tag, 'img');
assert.equal(image.path, 'img/m130298-c123.png');
assert.equal(image.sizeUnits, 'em');
const descriptionLine = entry.content[5];
assert.equal(
descriptionLine,
'Race: Human Alexia Midgar is the second princess of the Kingdom of Midgar.',
const sourceDiv = children[3] as { tag: string; content: string };
assert.equal(sourceDiv.tag, 'div');
assert.ok(sourceDiv.content.includes('The Eminence in Shadow'));
const roleBadgeDiv = children[4] as { tag: string; content: Record<string, unknown> };
assert.equal(roleBadgeDiv.tag, 'div');
const badge = roleBadgeDiv.content as { tag: string; content: string };
assert.equal(badge.tag, 'span');
assert.equal(badge.content, 'Side Character');
const descSection = children.find(
(c) =>
(c as { tag?: string }).tag === 'details' &&
Array.isArray((c as { content?: unknown[] }).content) &&
(c as { content: Array<{ content?: string }> }).content[0]?.content === 'Description',
) as { tag: string; content: Array<Record<string, unknown>> } | undefined;
assert.ok(descSection, 'expected Description collapsible section');
const descBody = descSection.content[1] as { content: string };
assert.ok(
descBody.content.includes('Alexia Midgar is the second princess of the Kingdom of Midgar.'),
);
const infoSection = children.find(
(c) =>
(c as { tag?: string }).tag === 'details' &&
Array.isArray((c as { content?: unknown[] }).content) &&
(c as { content: Array<{ content?: string }> }).content[0]?.content ===
'Character Information',
) as { tag: string; content: Array<Record<string, unknown>> } | undefined;
assert.ok(
infoSection,
'expected Character Information collapsible section with parsed __Race:__ field',
);
const topLevelImageGlossaryEntry = glossary.find(
(item) => typeof item === 'object' && item !== null && (item as { type?: string }).type === 'image',
(item) =>
typeof item === 'object' && item !== null && (item as { type?: string }).type === 'image',
);
assert.equal(topLevelImageGlossaryEntry, undefined);
} finally {
@@ -289,8 +342,19 @@ test('generateForCurrentMedia adds kana aliases for romanized names when native
});
const result = await runtime.generateForCurrentMedia();
const termBank = JSON.parse(readStoredZipEntry(result.zipPath, 'term_bank_1.json').toString('utf8')) as Array<
[string, string, string, string, number, Array<string | Record<string, unknown>>, number, string]
const termBank = JSON.parse(
readStoredZipEntry(result.zipPath, 'term_bank_1.json').toString('utf8'),
) as Array<
[
string,
string,
string,
string,
number,
Array<string | Record<string, unknown>>,
number,
string,
]
>;
const kazuma = termBank.find(([term]) => term === 'カズマ');
@@ -433,7 +497,16 @@ test('getOrCreateCurrentSnapshot persists and reuses normalized snapshot data',
mediaId: number;
entryCount: number;
termEntries: Array<
[string, string, string, string, number, Array<string | Record<string, unknown>>, number, string]
[
string,
string,
string,
string,
number,
Array<string | Record<string, unknown>>,
number,
string,
]
>;
};
assert.equal(snapshot.mediaId, 130298);
@@ -567,12 +640,27 @@ test('getOrCreateCurrentSnapshot rebuilds snapshots written with an older format
const snapshot = JSON.parse(fs.readFileSync(snapshotPath, 'utf8')) as {
formatVersion: number;
termEntries: Array<
[string, string, string, string, number, Array<string | Record<string, unknown>>, number, string]
[
string,
string,
string,
string,
number,
Array<string | Record<string, unknown>>,
number,
string,
]
>;
};
assert.equal(snapshot.formatVersion > 9, true);
assert.equal(snapshot.termEntries.some(([term]) => term === 'アルファ'), true);
assert.equal(snapshot.termEntries.some(([term]) => term === 'stale'), false);
assert.equal(
snapshot.termEntries.some(([term]) => term === 'アルファ'),
true,
);
assert.equal(
snapshot.termEntries.some(([term]) => term === 'stale'),
false,
);
} finally {
globalThis.fetch = originalFetch;
}
@@ -693,7 +781,7 @@ test('generateForCurrentMedia logs progress while resolving and rebuilding snaps
'[dictionary] AniList match: The Eminence in Shadow -> AniList 130298',
'[dictionary] snapshot miss for AniList 130298, fetching characters',
'[dictionary] downloaded AniList character page 1 for AniList 130298',
'[dictionary] downloading 1 character images for AniList 130298',
'[dictionary] downloading 1 images for AniList 130298',
'[dictionary] stored snapshot for AniList 130298: 32 terms',
'[dictionary] building ZIP for AniList 130298',
'[dictionary] generated AniList 130298: 32 terms -> ' +
@@ -704,6 +792,168 @@ test('generateForCurrentMedia logs progress while resolving and rebuilding snaps
}
});
// Regression test: two characters voiced by the same AniList person (id 9001)
// must trigger only ONE download of the shared voice-actor image.
test('generateForCurrentMedia downloads shared voice actor images once per AniList person id', async () => {
  const userDataPath = makeTempDir();
  const originalFetch = globalThis.fetch;
  // Records every image URL fetched so the dedupe behavior can be asserted.
  const fetchedImageUrls: string[] = [];
  globalThis.fetch = (async (input: string | URL | Request, init?: RequestInit) => {
    const url = typeof input === 'string' ? input : input instanceof URL ? input.href : input.url;
    if (url === GRAPHQL_URL) {
      const body = JSON.parse(String(init?.body ?? '{}')) as {
        query?: string;
      };
      // Media-search GraphQL query -> one matching media entry.
      if (body.query?.includes('Page(perPage: 10)')) {
        return new Response(
          JSON.stringify({
            data: {
              Page: {
                media: [
                  {
                    id: 130298,
                    episodes: 20,
                    title: {
                      romaji: 'Kage no Jitsuryokusha ni Naritakute!',
                      english: 'The Eminence in Shadow',
                      native: '陰の実力者になりたくて!',
                    },
                  },
                ],
              },
            },
          }),
          {
            status: 200,
            headers: { 'content-type': 'application/json' },
          },
        );
      }
      // Character-page GraphQL query -> two characters that share the same
      // voice actor (person id 9001, same image URL on both edges).
      if (body.query?.includes('characters(page: $page')) {
        return new Response(
          JSON.stringify({
            data: {
              Media: {
                title: {
                  romaji: 'Kage no Jitsuryokusha ni Naritakute!',
                  english: 'The Eminence in Shadow',
                  native: '陰の実力者になりたくて!',
                },
                characters: {
                  pageInfo: { hasNextPage: false },
                  edges: [
                    {
                      role: 'MAIN',
                      voiceActors: [
                        {
                          id: 9001,
                          name: {
                            full: 'Kana Hanazawa',
                            native: '花澤香菜',
                          },
                          image: {
                            large: null,
                            medium: 'https://example.com/kana.png',
                          },
                        },
                      ],
                      node: {
                        id: 321,
                        description: 'Alpha is the second-in-command of Shadow Garden.',
                        image: {
                          large: 'https://example.com/alpha.png',
                          medium: null,
                        },
                        name: {
                          full: 'Alpha',
                          native: 'アルファ',
                        },
                      },
                    },
                    {
                      role: 'SUPPORTING',
                      voiceActors: [
                        {
                          id: 9001,
                          name: {
                            full: 'Kana Hanazawa',
                            native: '花澤香菜',
                          },
                          image: {
                            large: null,
                            medium: 'https://example.com/kana.png',
                          },
                        },
                      ],
                      node: {
                        id: 654,
                        description: 'Beta documents Shadow Garden operations.',
                        image: {
                          large: 'https://example.com/beta.png',
                          medium: null,
                        },
                        name: {
                          full: 'Beta',
                          native: 'ベータ',
                        },
                      },
                    },
                  ],
                },
              },
            },
          }),
          {
            status: 200,
            headers: { 'content-type': 'application/json' },
          },
        );
      }
    }
    if (
      url === 'https://example.com/alpha.png' ||
      url === 'https://example.com/beta.png' ||
      url === 'https://example.com/kana.png'
    ) {
      fetchedImageUrls.push(url);
      return new Response(PNG_1X1, {
        status: 200,
        headers: { 'content-type': 'image/png' },
      });
    }
    throw new Error(`Unexpected fetch URL: ${url}`);
  }) as typeof globalThis.fetch;
  try {
    const runtime = createCharacterDictionaryRuntimeService({
      userDataPath,
      getCurrentMediaPath: () => '/tmp/eminence-s01e05.mkv',
      getCurrentMediaTitle: () => 'The Eminence in Shadow - S01E05',
      resolveMediaPathForJimaku: (mediaPath) => mediaPath,
      guessAnilistMediaInfo: async () => ({
        title: 'The Eminence in Shadow',
        episode: 5,
        source: 'fallback',
      }),
      now: () => 1_700_000_000_100,
      sleep: async () => undefined,
    });
    await runtime.generateForCurrentMedia();
    // kana.png appears once, between the two character images: it is fetched
    // with the first character (Alpha) and NOT re-fetched for Beta, who is
    // voiced by the same person.
    assert.deepEqual(fetchedImageUrls, [
      'https://example.com/alpha.png',
      'https://example.com/kana.png',
      'https://example.com/beta.png',
    ]);
  } finally {
    // Always restore the real fetch, even if an assertion fails.
    globalThis.fetch = originalFetch;
  }
});
test('buildMergedDictionary combines stored snapshots into one stable dictionary', async () => {
const userDataPath = makeTempDir();
const originalFetch = globalThis.fetch;
@@ -880,8 +1130,19 @@ test('buildMergedDictionary combines stored snapshots into one stable dictionary
const index = JSON.parse(readStoredZipEntry(merged.zipPath, 'index.json').toString('utf8')) as {
title: string;
};
const termBank = JSON.parse(readStoredZipEntry(merged.zipPath, 'term_bank_1.json').toString('utf8')) as Array<
[string, string, string, string, number, Array<string | Record<string, unknown>>, number, string]
const termBank = JSON.parse(
readStoredZipEntry(merged.zipPath, 'term_bank_1.json').toString('utf8'),
) as Array<
[
string,
string,
string,
string,
number,
Array<string | Record<string, unknown>>,
number,
string,
]
>;
const frieren = termBank.find(([term]) => term === 'フリーレン');
const alpha = termBank.find(([term]) => term === 'アルファ');
@@ -1031,7 +1292,10 @@ test('generateForCurrentMedia paces AniList requests and character image downloa
await runtime.generateForCurrentMedia();
assert.deepEqual(sleepCalls, [2000, 250]);
assert.deepEqual(imageRequests, ['https://example.com/alpha.png', 'https://example.com/beta.png']);
assert.deepEqual(imageRequests, [
'https://example.com/alpha.png',
'https://example.com/beta.png',
]);
} finally {
globalThis.fetch = originalFetch;
}

View File

@@ -54,7 +54,7 @@ export type CharacterDictionarySnapshot = {
images: CharacterDictionarySnapshotImage[];
};
const CHARACTER_DICTIONARY_FORMAT_VERSION = 10;
const CHARACTER_DICTIONARY_FORMAT_VERSION = 12;
const CHARACTER_DICTIONARY_MERGED_TITLE = 'SubMiner Character Dictionary';
type AniListSearchResponse = {
@@ -84,6 +84,17 @@ type AniListCharacterPageResponse = {
};
edges?: Array<{
role?: string | null;
voiceActors?: Array<{
id: number;
name?: {
full?: string | null;
native?: string | null;
} | null;
image?: {
large?: string | null;
medium?: string | null;
} | null;
}> | null;
node?: {
id: number;
description?: string | null;
@@ -101,6 +112,13 @@ type AniListCharacterPageResponse = {
} | null;
};
type VoiceActorRecord = {
id: number;
fullName: string;
nativeName: string;
imageUrl: string | null;
};
type CharacterRecord = {
id: number;
role: CharacterDictionaryRole;
@@ -108,6 +126,7 @@ type CharacterRecord = {
nativeName: string;
description: string;
imageUrl: string | null;
voiceActors: VoiceActorRecord[];
};
type ZipEntry = {
@@ -430,20 +449,13 @@ function romanizedTokenToKatakana(token: string): string | null {
continue;
}
if (
current === 'n' &&
next.length > 0 &&
next !== 'y' &&
!'aeiou'.includes(next)
) {
if (current === 'n' && next.length > 0 && next !== 'y' && !'aeiou'.includes(next)) {
output += 'ン';
i += 1;
continue;
}
const digraph = ROMANIZED_KANA_DIGRAPHS.find(([romaji]) =>
normalized.startsWith(romaji, i),
);
const digraph = ROMANIZED_KANA_DIGRAPHS.find(([romaji]) => normalized.startsWith(romaji, i));
if (digraph) {
output += digraph[1];
i += digraph[0].length;
@@ -531,14 +543,34 @@ function buildNameTerms(character: CharacterRecord): string[] {
return [...withHonorifics].filter((entry) => entry.trim().length > 0);
}
// Flatten an AniList description: drop HTML tags, collapse whitespace runs
// to single spaces, and trim the ends.
function stripDescription(value: string): string {
  const withoutTags = value.replace(/<[^>]+>/g, ' ');
  const collapsed = withoutTags.replace(/\s+/g, ' ');
  return collapsed.trim();
}
function parseCharacterDescription(raw: string): {
fields: Array<{ key: string; value: string }>;
text: string;
} {
const cleaned = raw.replace(/<br\s*\/?>/gi, '\n').replace(/<[^>]+>/g, ' ');
const lines = cleaned.split(/\n/);
const fields: Array<{ key: string; value: string }> = [];
const textLines: string[] = [];
function normalizeDescription(value: string): string {
const stripped = stripDescription(value);
if (!stripped) return '';
return stripped
for (const line of lines) {
const trimmed = line.trim();
if (!trimmed) continue;
const match = trimmed.match(/^__([^_]+):__\s*(.+)$/);
if (match) {
const value = match[2]!
.replace(/__([^_]+)__/g, '$1')
.replace(/\*\*([^*]+)\*\*/g, '$1')
.replace(/_([^_]+)_/g, '$1')
.replace(/\*([^*]+)\*/g, '$1')
.trim();
fields.push({ key: match[1]!.trim(), value });
} else {
textLines.push(trimmed);
}
}
const text = textLines
.join(' ')
.replace(/\[([^\]]+)\]\((https?:\/\/[^)\s]+)\)/g, '$1')
.replace(/https?:\/\/\S+/g, '')
.replace(/__([^_]+)__/g, '$1')
@@ -547,6 +579,8 @@ function normalizeDescription(value: string): string {
.replace(/!~/g, '')
.replace(/\s+/g, ' ')
.trim();
return { fields, text };
}
function roleInfo(role: CharacterDictionaryRole): { tag: string; score: number } {
@@ -708,25 +742,142 @@ function writeSnapshot(snapshotPath: string, snapshot: CharacterDictionarySnapsh
fs.writeFileSync(snapshotPath, JSON.stringify(snapshot, null, 2), 'utf8');
}
/**
 * Inline style for the role badge pill; the background hue encodes the
 * character's role (main/primary/side), with grey as the fallback.
 */
function roleBadgeStyle(role: CharacterDictionaryRole): Record<string, string> {
  const backgroundByRole: Record<string, string> = {
    main: '#4a8c3f',
    primary: '#5c82b0',
    side: '#7889a0',
  };
  return {
    borderRadius: '4px',
    padding: '0.15em 0.5em',
    fontSize: '0.8em',
    fontWeight: 'bold',
    color: '#fff',
    backgroundColor: backgroundByRole[role] ?? '#777',
  };
}
/**
 * Wraps a structured-content body in an expanded-by-default <details>
 * element with a bold, clickable summary line titled `title`.
 */
function buildCollapsibleSection(
  title: string,
  body: Array<string | Record<string, unknown>> | string | Record<string, unknown>,
): Record<string, unknown> {
  const summary: Record<string, unknown> = {
    tag: 'summary',
    style: { fontWeight: 'bold', fontSize: '0.95em', cursor: 'pointer' },
    content: title,
  };
  const wrappedBody: Record<string, unknown> = {
    tag: 'div',
    style: { padding: '0.25em 0 0 0.4em', fontSize: '0.9em' },
    content: body,
  };
  return {
    tag: 'details',
    open: true,
    style: { marginTop: '0.4em' },
    content: [summary, wrappedBody],
  };
}
/**
 * Builds the "Voiced by" structured-content body for a glossary entry.
 *
 * A single voice actor renders as a portrait (when an image path is known)
 * beside the name, or a bare div otherwise; several voice actors render as
 * a bullet list of names without portraits. Names prefer the native form,
 * with the romanized form in parentheses when both exist.
 */
function buildVoicedByContent(
  voiceActors: VoiceActorRecord[],
  vaImagePaths: Map<number, string>,
): Record<string, unknown> {
  const formatLabel = (va: VoiceActorRecord): string => {
    if (!va.nativeName) return va.fullName;
    return va.fullName ? `${va.nativeName} (${va.fullName})` : va.nativeName;
  };
  if (voiceActors.length !== 1) {
    // Zero or many voice actors: plain list, portraits omitted.
    return {
      tag: 'ul',
      style: { marginTop: '0.15em' },
      content: voiceActors.map((va): Record<string, unknown> => ({
        tag: 'li',
        content: formatLabel(va),
      })),
    };
  }
  const va = voiceActors[0]!;
  const label = formatLabel(va);
  const imagePath = vaImagePaths.get(va.id);
  if (!imagePath) {
    return { tag: 'div', content: label };
  }
  // Portrait thumbnail (3em square) in the left cell, name in the right.
  const portraitCell: Record<string, unknown> = {
    tag: 'td',
    style: {
      verticalAlign: 'top',
      padding: '0',
      paddingRight: '0.4em',
      borderWidth: '0',
    },
    content: {
      tag: 'img',
      path: imagePath,
      width: 3,
      height: 3,
      sizeUnits: 'em',
      title: label,
      alt: label,
      collapsed: false,
      collapsible: false,
      background: true,
    },
  };
  const nameCell: Record<string, unknown> = {
    tag: 'td',
    style: { verticalAlign: 'middle', padding: '0', borderWidth: '0' },
    content: label,
  };
  return {
    tag: 'table',
    content: { tag: 'tr', content: [portraitCell, nameCell] },
  };
}
function createDefinitionGlossary(
character: CharacterRecord,
mediaTitle: string,
imagePath: string | null,
vaImagePaths: Map<number, string>,
): CharacterDictionaryGlossaryEntry[] {
const displayName = character.nativeName || character.fullName || `Character ${character.id}`;
const lines: string[] = [`${displayName} [${roleLabel(character.role)}]`, `${mediaTitle} · AniList`];
const description = normalizeDescription(character.description);
if (description) {
lines.push(description);
}
if (!imagePath) {
return [lines.join('\n')];
}
const secondaryName =
character.nativeName && character.fullName && character.fullName !== character.nativeName
? character.fullName
: null;
const { fields, text: descriptionText } = parseCharacterDescription(character.description);
const content: Array<string | Record<string, unknown>> = [
{
tag: 'div',
style: { fontWeight: 'bold', fontSize: '1.1em', marginBottom: '0.1em' },
content: displayName,
},
];
if (secondaryName) {
content.push({
tag: 'div',
style: { fontSize: '0.85em', fontStyle: 'italic', color: '#b0b0b0', marginBottom: '0.2em' },
content: secondaryName,
});
}
if (imagePath) {
content.push({
tag: 'div',
style: { marginTop: '0.3em', marginBottom: '0.3em' },
content: {
tag: 'img',
path: imagePath,
width: 8,
@@ -739,19 +890,56 @@ function createDefinitionGlossary(
collapsible: false,
background: true,
},
];
for (let i = 0; i < lines.length; i += 1) {
if (i > 0) {
content.push({ tag: 'br' });
});
}
content.push(lines[i]!);
content.push({
tag: 'div',
style: { fontSize: '0.8em', color: '#999', marginBottom: '0.2em' },
content: `From: ${mediaTitle}`,
});
content.push({
tag: 'div',
style: { marginBottom: '0.15em' },
content: {
tag: 'span',
style: roleBadgeStyle(character.role),
content: `${roleLabel(character.role)} Character`,
},
});
if (descriptionText) {
content.push(buildCollapsibleSection('Description', descriptionText));
}
if (fields.length > 0) {
const fieldItems: Array<Record<string, unknown>> = fields.map((f) => ({
tag: 'li',
content: `${f.key}: ${f.value}`,
}));
content.push(
buildCollapsibleSection('Character Information', {
tag: 'ul',
style: { marginTop: '0.15em' },
content: fieldItems,
}),
);
}
if (character.voiceActors.length > 0) {
content.push(
buildCollapsibleSection(
'Voiced by',
buildVoicedByContent(character.voiceActors, vaImagePaths),
),
);
}
return [
{
type: 'structured-content',
content,
content: { tag: 'div', content },
},
];
}
@@ -760,6 +948,10 @@ function buildSnapshotImagePath(mediaId: number, charId: number, ext: string): s
return `img/m${mediaId}-c${charId}.${ext}`;
}
// Archive-relative path for a voice-actor portrait, namespaced by media ID
// and AniList person ID so entries from different media never collide.
function buildVaImagePath(mediaId: number, vaId: number, ext: string): string {
  return 'img/m' + mediaId + '-va' + vaId + '.' + ext;
}
function buildTermEntry(
term: string,
reading: string,
@@ -998,6 +1190,16 @@ async function fetchCharactersForMedia(
}
edges {
role
voiceActors(language: JAPANESE) {
id
name {
full
native
}
image {
medium
}
}
node {
id
description(asHtml: false)
@@ -1042,6 +1244,19 @@ async function fetchCharactersForMedia(
const fullName = node.name?.full?.trim() || '';
const nativeName = node.name?.native?.trim() || '';
if (!fullName && !nativeName) continue;
const voiceActors: VoiceActorRecord[] = [];
for (const va of edge?.voiceActors ?? []) {
if (!va || typeof va.id !== 'number') continue;
const vaFull = va.name?.full?.trim() || '';
const vaNative = va.name?.native?.trim() || '';
if (!vaFull && !vaNative) continue;
voiceActors.push({
id: va.id,
fullName: vaFull,
nativeName: vaNative,
imageUrl: va.image?.medium || null,
});
}
characters.push({
id: node.id,
role: mapRole(edge?.role),
@@ -1049,6 +1264,7 @@ async function fetchCharactersForMedia(
nativeName,
description: node.description || '',
imageUrl: node.image?.large || node.image?.medium || null,
voiceActors,
});
}
@@ -1065,7 +1281,10 @@ async function fetchCharactersForMedia(
};
}
async function downloadCharacterImage(imageUrl: string, charId: number): Promise<{
async function downloadCharacterImage(
imageUrl: string,
charId: number,
): Promise<{
filename: string;
ext: string;
bytes: Buffer;
@@ -1119,6 +1338,7 @@ function buildSnapshotFromCharacters(
mediaTitle: string,
characters: CharacterRecord[],
imagesByCharacterId: Map<number, CharacterDictionarySnapshotImage>,
imagesByVaId: Map<number, CharacterDictionarySnapshotImage>,
updatedAt: number,
): CharacterDictionarySnapshot {
const termEntries: CharacterDictionaryTermEntry[] = [];
@@ -1126,7 +1346,12 @@ function buildSnapshotFromCharacters(
for (const character of characters) {
const imagePath = imagesByCharacterId.get(character.id)?.path ?? null;
const glossary = createDefinitionGlossary(character, mediaTitle, imagePath);
const vaImagePaths = new Map<number, string>();
for (const va of character.voiceActors) {
const vaImg = imagesByVaId.get(va.id);
if (vaImg) vaImagePaths.set(va.id, vaImg.path);
}
const glossary = createDefinitionGlossary(character, mediaTitle, imagePath, vaImagePaths);
const candidateTerms = buildNameTerms(character);
for (const term of candidateTerms) {
const reading = buildReading(term);
@@ -1148,7 +1373,7 @@ function buildSnapshotFromCharacters(
entryCount: termEntries.length,
updatedAt,
termEntries,
images: [...imagesByCharacterId.values()],
images: [...imagesByCharacterId.values(), ...imagesByVaId.values()],
};
}
@@ -1163,7 +1388,10 @@ function buildDictionaryZip(
const zipFiles: Array<{ name: string; data: Buffer }> = [
{
name: 'index.json',
data: Buffer.from(JSON.stringify(createIndex(dictionaryTitle, description, revision), null, 2), 'utf8'),
data: Buffer.from(
JSON.stringify(createIndex(dictionaryTitle, description, revision), null, 2),
'utf8',
),
},
{
name: 'tag_bank_1.json',
@@ -1238,7 +1466,9 @@ export function createCharacterDictionaryRuntimeService(deps: CharacterDictionar
}
deps.logInfo?.(
`[dictionary] current anime guess: ${guessed.title.trim()}${
typeof guessed.episode === 'number' && guessed.episode > 0 ? ` (episode ${guessed.episode})` : ''
typeof guessed.episode === 'number' && guessed.episode > 0
? ` (episode ${guessed.episode})`
: ''
}`,
);
const resolved = await resolveAniListMediaIdFromGuess(guessed, beforeRequest);
@@ -1270,7 +1500,9 @@ export function createCharacterDictionaryRuntimeService(deps: CharacterDictionar
mediaId,
beforeRequest,
(page) => {
deps.logInfo?.(`[dictionary] downloaded AniList character page ${page} for AniList ${mediaId}`);
deps.logInfo?.(
`[dictionary] downloaded AniList character page ${page} for AniList ${mediaId}`,
);
},
);
if (characters.length === 0) {
@@ -1278,25 +1510,44 @@ export function createCharacterDictionaryRuntimeService(deps: CharacterDictionar
}
const imagesByCharacterId = new Map<number, CharacterDictionarySnapshotImage>();
const charactersWithImages = characters.filter((character) => Boolean(character.imageUrl)).length;
if (charactersWithImages > 0) {
const imagesByVaId = new Map<number, CharacterDictionarySnapshotImage>();
const allImageUrls: Array<{ id: number; url: string; kind: 'character' | 'va' }> = [];
const seenVaIds = new Set<number>();
for (const character of characters) {
if (character.imageUrl) {
allImageUrls.push({ id: character.id, url: character.imageUrl, kind: 'character' });
}
for (const va of character.voiceActors) {
if (va.imageUrl && !seenVaIds.has(va.id)) {
seenVaIds.add(va.id);
allImageUrls.push({ id: va.id, url: va.imageUrl, kind: 'va' });
}
}
}
if (allImageUrls.length > 0) {
deps.logInfo?.(
`[dictionary] downloading ${charactersWithImages} character images for AniList ${mediaId}`,
`[dictionary] downloading ${allImageUrls.length} images for AniList ${mediaId}`,
);
}
let hasAttemptedCharacterImageDownload = false;
for (const character of characters) {
if (!character.imageUrl) continue;
if (hasAttemptedCharacterImageDownload) {
let hasAttemptedImageDownload = false;
for (const entry of allImageUrls) {
if (hasAttemptedImageDownload) {
await sleepMs(CHARACTER_IMAGE_DOWNLOAD_DELAY_MS);
}
hasAttemptedCharacterImageDownload = true;
const image = await downloadCharacterImage(character.imageUrl, character.id);
hasAttemptedImageDownload = true;
const image = await downloadCharacterImage(entry.url, entry.id);
if (!image) continue;
imagesByCharacterId.set(character.id, {
path: buildSnapshotImagePath(mediaId, character.id, image.ext),
if (entry.kind === 'character') {
imagesByCharacterId.set(entry.id, {
path: buildSnapshotImagePath(mediaId, entry.id, image.ext),
dataBase64: image.bytes.toString('base64'),
});
} else {
imagesByVaId.set(entry.id, {
path: buildVaImagePath(mediaId, entry.id, image.ext),
dataBase64: image.bytes.toString('base64'),
});
}
}
const snapshot = buildSnapshotFromCharacters(
@@ -1304,6 +1555,7 @@ export function createCharacterDictionaryRuntimeService(deps: CharacterDictionar
fetchedMediaTitle || mediaTitleHint || `AniList ${mediaId}`,
characters,
imagesByCharacterId,
imagesByVaId,
deps.now(),
);
writeSnapshot(snapshotPath, snapshot);
@@ -1367,7 +1619,10 @@ export function createCharacterDictionaryRuntimeService(deps: CharacterDictionar
entryCount,
};
},
generateForCurrentMedia: async (targetPath?: string, _options?: CharacterDictionaryGenerateOptions) => {
generateForCurrentMedia: async (
targetPath?: string,
_options?: CharacterDictionaryGenerateOptions,
) => {
let hasAniListRequest = false;
const waitForAniListRequestSlot = async (): Promise<void> => {
if (!hasAniListRequest) {

View File

@@ -37,6 +37,7 @@ test('app-ready main deps builder returns mapped app-ready runtime deps', async
startBackgroundWarmups: () => calls.push('start-warmups'),
texthookerOnlyMode: false,
shouldAutoInitializeOverlayRuntimeFromConfig: () => true,
setVisibleOverlayVisible: () => calls.push('set-visible-overlay'),
initializeOverlayRuntime: () => calls.push('init-overlay'),
handleInitialArgs: () => calls.push('handle-initial-args'),
onCriticalConfigErrors: () => {
@@ -58,6 +59,7 @@ test('app-ready main deps builder returns mapped app-ready runtime deps', async
await onReady.loadYomitanExtension();
await onReady.prewarmSubtitleDictionaries?.();
onReady.startBackgroundWarmups();
onReady.setVisibleOverlayVisible(true);
assert.deepEqual(calls, [
'load-subtitle-position',
@@ -67,5 +69,6 @@ test('app-ready main deps builder returns mapped app-ready runtime deps', async
'load-yomitan',
'prewarm-dicts',
'start-warmups',
'set-visible-overlay',
]);
});

View File

@@ -26,6 +26,7 @@ export function createBuildAppReadyRuntimeMainDepsHandler(deps: AppReadyRuntimeD
startBackgroundWarmups: deps.startBackgroundWarmups,
texthookerOnlyMode: deps.texthookerOnlyMode,
shouldAutoInitializeOverlayRuntimeFromConfig: deps.shouldAutoInitializeOverlayRuntimeFromConfig,
setVisibleOverlayVisible: deps.setVisibleOverlayVisible,
initializeOverlayRuntime: deps.initializeOverlayRuntime,
handleInitialArgs: deps.handleInitialArgs,
onCriticalConfigErrors: deps.onCriticalConfigErrors,

View File

@@ -48,6 +48,7 @@ test('composeAppReadyRuntime returns reload/critical/app-ready handlers', () =>
startBackgroundWarmups: () => {},
texthookerOnlyMode: false,
shouldAutoInitializeOverlayRuntimeFromConfig: () => false,
setVisibleOverlayVisible: () => {},
initializeOverlayRuntime: () => {},
handleInitialArgs: () => {},
logDebug: () => {},

View File

@@ -22,10 +22,13 @@ type RequiredMpvInputKeys = keyof ComposerInputs<
MpvRuntimeComposerOptions<FakeMpvClient, FakeTokenizerDeps, FakeTokenizedSubtitle>
>;
type _anilistHasNotifyDeps = Assert<IsAssignable<'notifyDeps', RequiredAnilistSetupInputKeys>>;
type _jellyfinHasGetMpvClient = Assert<IsAssignable<'getMpvClient', RequiredJellyfinInputKeys>>;
type _ipcHasRegistration = Assert<IsAssignable<'registration', RequiredIpcInputKeys>>;
type _mpvHasTokenizer = Assert<IsAssignable<'tokenizer', RequiredMpvInputKeys>>;
const contractAssertions = [
true as Assert<IsAssignable<'notifyDeps', RequiredAnilistSetupInputKeys>>,
true as Assert<IsAssignable<'getMpvClient', RequiredJellyfinInputKeys>>,
true as Assert<IsAssignable<'registration', RequiredIpcInputKeys>>,
true as Assert<IsAssignable<'tokenizer', RequiredMpvInputKeys>>,
];
void contractAssertions;
// @ts-expect-error missing required notifyDeps should fail compile-time contract
const anilistMissingRequired: AnilistSetupComposerOptions = {

View File

@@ -24,6 +24,7 @@ export function resolveSubtitleStyleForRenderer(config: ResolvedConfig) {
...config.subtitleStyle,
nPlusOneColor: config.ankiConnect.nPlusOne.nPlusOne,
knownWordColor: config.ankiConnect.nPlusOne.knownWord,
nameMatchColor: config.subtitleStyle.nameMatchColor,
enableJlpt: config.subtitleStyle.enableJlpt,
frequencyDictionary: config.subtitleStyle.frequencyDictionary,
};

View File

@@ -34,6 +34,7 @@ test('tokenizer deps builder records known-word lookups and maps readers', () =>
getMinSentenceWordsForNPlusOne: () => 3,
getJlptLevel: () => 'N2',
getJlptEnabled: () => true,
getNameMatchEnabled: () => false,
getFrequencyDictionaryEnabled: () => true,
getFrequencyDictionaryMatchMode: () => 'surface',
getFrequencyRank: () => 5,
@@ -48,10 +49,39 @@ test('tokenizer deps builder records known-word lookups and maps readers', () =>
deps.setYomitanParserInitPromise(null);
assert.equal(deps.getNPlusOneEnabled?.(), true);
assert.equal(deps.getMinSentenceWordsForNPlusOne?.(), 3);
assert.equal(deps.getNameMatchEnabled?.(), false);
assert.equal(deps.getFrequencyDictionaryMatchMode?.(), 'surface');
assert.deepEqual(calls, ['lookup:true', 'lookup:false', 'set-window', 'set-ready', 'set-init']);
});
test('tokenizer deps builder disables name matching when character dictionary is disabled', () => {
const deps = createBuildTokenizerDepsMainHandler({
getYomitanExt: () => null,
getYomitanParserWindow: () => null,
setYomitanParserWindow: () => undefined,
getYomitanParserReadyPromise: () => null,
setYomitanParserReadyPromise: () => undefined,
getYomitanParserInitPromise: () => null,
setYomitanParserInitPromise: () => undefined,
isKnownWord: () => false,
recordLookup: () => undefined,
getKnownWordMatchMode: () => 'surface',
getNPlusOneEnabled: () => true,
getMinSentenceWordsForNPlusOne: () => 3,
getJlptLevel: () => 'N2',
getJlptEnabled: () => true,
getCharacterDictionaryEnabled: () => false,
getNameMatchEnabled: () => true,
getFrequencyDictionaryEnabled: () => true,
getFrequencyDictionaryMatchMode: () => 'surface',
getFrequencyRank: () => 5,
getYomitanGroupDebugEnabled: () => false,
getMecabTokenizer: () => null,
})();
assert.equal(deps.getNameMatchEnabled?.(), false);
});
test('mecab tokenizer check creates tokenizer once and runs availability check', async () => {
const calls: string[] = [];
type Tokenizer = { id: string };

View File

@@ -2,6 +2,8 @@ import type { TokenizerDepsRuntimeOptions } from '../../core/services/tokenizer'
type TokenizerMainDeps = TokenizerDepsRuntimeOptions & {
getJlptEnabled: NonNullable<TokenizerDepsRuntimeOptions['getJlptEnabled']>;
getCharacterDictionaryEnabled?: () => boolean;
getNameMatchEnabled?: NonNullable<TokenizerDepsRuntimeOptions['getNameMatchEnabled']>;
getFrequencyDictionaryEnabled: NonNullable<
TokenizerDepsRuntimeOptions['getFrequencyDictionaryEnabled']
>;
@@ -43,6 +45,12 @@ export function createBuildTokenizerDepsMainHandler(deps: TokenizerMainDeps) {
getMinSentenceWordsForNPlusOne: () => deps.getMinSentenceWordsForNPlusOne(),
getJlptLevel: (text: string) => deps.getJlptLevel(text),
getJlptEnabled: () => deps.getJlptEnabled(),
...(deps.getNameMatchEnabled
? {
getNameMatchEnabled: () =>
deps.getCharacterDictionaryEnabled?.() !== false && deps.getNameMatchEnabled!(),
}
: {}),
getFrequencyDictionaryEnabled: () => deps.getFrequencyDictionaryEnabled(),
getFrequencyDictionaryMatchMode: () => deps.getFrequencyDictionaryMatchMode(),
getFrequencyRank: (text: string) => deps.getFrequencyRank(text),

View File

@@ -27,6 +27,7 @@ const HEX_COLOR_RE = /^#(?:[0-9a-fA-F]{3}|[0-9a-fA-F]{4}|[0-9a-fA-F]{6}|[0-9a-fA
const FALLBACK_COLORS = {
knownWordColor: '#a6da95',
nPlusOneColor: '#c6a0f6',
nameMatchColor: '#f5bde6',
jlptN1Color: '#ed8796',
jlptN2Color: '#f5a97f',
jlptN3Color: '#f9e2af',
@@ -207,6 +208,7 @@ function buildBindingSections(keybindings: Keybinding[]): SessionHelpSection[] {
function buildColorSection(style: {
knownWordColor?: unknown;
nPlusOneColor?: unknown;
nameMatchColor?: unknown;
jlptColors?: {
N1?: unknown;
N2?: unknown;
@@ -228,6 +230,11 @@ function buildColorSection(style: {
action: normalizeColor(style.nPlusOneColor, FALLBACK_COLORS.nPlusOneColor),
color: normalizeColor(style.nPlusOneColor, FALLBACK_COLORS.nPlusOneColor),
},
{
shortcut: 'Character names',
action: normalizeColor(style.nameMatchColor, FALLBACK_COLORS.nameMatchColor),
color: normalizeColor(style.nameMatchColor, FALLBACK_COLORS.nameMatchColor),
},
{
shortcut: 'JLPT N1',
action: normalizeColor(style.jlptColors?.N1, FALLBACK_COLORS.jlptN1Color),

View File

@@ -58,6 +58,8 @@ export type RendererState = {
knownWordColor: string;
nPlusOneColor: string;
nameMatchEnabled: boolean;
nameMatchColor: string;
jlptN1Color: string;
jlptN2Color: string;
jlptN3Color: string;
@@ -125,6 +127,8 @@ export function createRendererState(): RendererState {
knownWordColor: '#a6da95',
nPlusOneColor: '#c6a0f6',
nameMatchEnabled: true,
nameMatchColor: '#f5bde6',
jlptN1Color: '#ed8796',
jlptN2Color: '#f5a97f',
jlptN3Color: '#f9e2af',
@@ -140,7 +144,7 @@ export function createRendererState(): RendererState {
frequencyDictionaryBand1Color: '#ed8796',
frequencyDictionaryBand2Color: '#f5a97f',
frequencyDictionaryBand3Color: '#f9e2af',
frequencyDictionaryBand4Color: '#a6e3a1',
frequencyDictionaryBand4Color: '#8bd5ca',
frequencyDictionaryBand5Color: '#8aadf4',
keybindingsMap: new Map(),

View File

@@ -285,6 +285,7 @@ body {
color: #cad3f5;
--subtitle-known-word-color: #a6da95;
--subtitle-n-plus-one-color: #c6a0f6;
--subtitle-name-match-color: #f5bde6;
--subtitle-jlpt-n1-color: #ed8796;
--subtitle-jlpt-n2-color: #f5a97f;
--subtitle-jlpt-n3-color: #f9e2af;
@@ -296,7 +297,7 @@ body {
--subtitle-frequency-band-1-color: #ed8796;
--subtitle-frequency-band-2-color: #f5a97f;
--subtitle-frequency-band-3-color: #f9e2af;
--subtitle-frequency-band-4-color: #a6e3a1;
--subtitle-frequency-band-4-color: #8bd5ca;
--subtitle-frequency-band-5-color: #8aadf4;
text-shadow:
2px 2px 4px rgba(0, 0, 0, 0.8),
@@ -416,6 +417,11 @@ body.settings-modal-open #subtitleContainer {
text-shadow: 0 0 6px rgba(198, 160, 246, 0.35);
}
#subtitleRoot .word.word-name-match {
color: var(--subtitle-name-match-color, #f5bde6);
text-shadow: 0 0 6px rgba(245, 189, 230, 0.35);
}
#subtitleRoot .word.word-jlpt-n1 {
text-decoration-line: underline;
text-decoration-thickness: 2px;
@@ -502,7 +508,7 @@ body.settings-modal-open #subtitleContainer {
}
#subtitleRoot .word.word-frequency-band-4 {
color: var(--subtitle-frequency-band-4-color, #a6e3a1);
color: var(--subtitle-frequency-band-4-color, #8bd5ca);
}
#subtitleRoot .word.word-frequency-band-5 {
@@ -510,7 +516,7 @@ body.settings-modal-open #subtitleContainer {
}
#subtitleRoot
.word:not(.word-known):not(.word-n-plus-one):not(.word-frequency-single):not(
.word:not(.word-known):not(.word-n-plus-one):not(.word-name-match):not(.word-frequency-single):not(
.word-frequency-band-1
):not(.word-frequency-band-2):not(.word-frequency-band-3):not(.word-frequency-band-4):not(
.word-frequency-band-5
@@ -523,6 +529,7 @@ body.settings-modal-open #subtitleContainer {
#subtitleRoot .word.word-known:hover,
#subtitleRoot .word.word-n-plus-one:hover,
#subtitleRoot .word.word-name-match:hover,
#subtitleRoot .word.word-frequency-single:hover,
#subtitleRoot .word.word-frequency-band-1:hover,
#subtitleRoot .word.word-frequency-band-2:hover,
@@ -536,6 +543,7 @@ body.settings-modal-open #subtitleContainer {
#subtitleRoot .word.word-known .c:hover,
#subtitleRoot .word.word-n-plus-one .c:hover,
#subtitleRoot .word.word-name-match .c:hover,
#subtitleRoot .word.word-frequency-single .c:hover,
#subtitleRoot .word.word-frequency-band-1 .c:hover,
#subtitleRoot .word.word-frequency-band-2 .c:hover,
@@ -550,7 +558,7 @@ body.settings-modal-open #subtitleContainer {
#subtitleRoot
.word:is(.word-jlpt-n1, .word-jlpt-n2, .word-jlpt-n3, .word-jlpt-n4, .word-jlpt-n5):not(
.word-known
):not(.word-n-plus-one):not(.word-frequency-single):not(.word-frequency-band-1):not(
):not(.word-n-plus-one):not(.word-name-match):not(.word-frequency-single):not(.word-frequency-band-1):not(
.word-frequency-band-2
):not(.word-frequency-band-3):not(.word-frequency-band-4):not(.word-frequency-band-5):hover {
color: var(--subtitle-hover-token-color, #f4dbd6) !important;
@@ -583,6 +591,12 @@ body.settings-modal-open #subtitleContainer {
-webkit-text-fill-color: var(--subtitle-n-plus-one-color, #c6a0f6) !important;
}
#subtitleRoot .word.word-name-match::selection,
#subtitleRoot .word.word-name-match .c::selection {
color: var(--subtitle-name-match-color, #f5bde6) !important;
-webkit-text-fill-color: var(--subtitle-name-match-color, #f5bde6) !important;
}
#subtitleRoot .word.word-frequency-single::selection,
#subtitleRoot .word.word-frequency-single .c::selection {
color: var(--subtitle-frequency-single-color, #f5a97f) !important;
@@ -609,8 +623,8 @@ body.settings-modal-open #subtitleContainer {
#subtitleRoot .word.word-frequency-band-4::selection,
#subtitleRoot .word.word-frequency-band-4 .c::selection {
color: var(--subtitle-frequency-band-4-color, #a6e3a1) !important;
-webkit-text-fill-color: var(--subtitle-frequency-band-4-color, #a6e3a1) !important;
color: var(--subtitle-frequency-band-4-color, #8bd5ca) !important;
-webkit-text-fill-color: var(--subtitle-frequency-band-4-color, #8bd5ca) !important;
}
#subtitleRoot .word.word-frequency-band-5::selection,
@@ -622,13 +636,13 @@ body.settings-modal-open #subtitleContainer {
#subtitleRoot
.word:is(.word-jlpt-n1, .word-jlpt-n2, .word-jlpt-n3, .word-jlpt-n4, .word-jlpt-n5):not(
.word-known
):not(.word-n-plus-one):not(.word-frequency-single):not(.word-frequency-band-1):not(
):not(.word-n-plus-one):not(.word-name-match):not(.word-frequency-single):not(.word-frequency-band-1):not(
.word-frequency-band-2
):not(.word-frequency-band-3):not(.word-frequency-band-4):not(.word-frequency-band-5)::selection,
#subtitleRoot
.word:is(.word-jlpt-n1, .word-jlpt-n2, .word-jlpt-n3, .word-jlpt-n4, .word-jlpt-n5):not(
.word-known
):not(.word-n-plus-one):not(.word-frequency-single):not(.word-frequency-band-1):not(
):not(.word-n-plus-one):not(.word-name-match):not(.word-frequency-single):not(.word-frequency-band-1):not(
.word-frequency-band-2
):not(.word-frequency-band-3):not(.word-frequency-band-4):not(.word-frequency-band-5)
.c::selection {

View File

@@ -181,6 +181,45 @@ test('computeWordClass preserves known and n+1 classes while adding JLPT classes
assert.equal(computeWordClass(nPlusOneJlpt), 'word word-n-plus-one word-jlpt-n2');
});
test('computeWordClass applies name-match class ahead of known and frequency classes', () => {
const token = createToken({
isKnown: true,
frequencyRank: 10,
surface: 'アクア',
}) as MergedToken & { isNameMatch?: boolean };
token.isNameMatch = true;
assert.equal(
computeWordClass(token, {
enabled: true,
topX: 100,
mode: 'single',
singleColor: '#000000',
bandedColors: ['#000000', '#000000', '#000000', '#000000', '#000000'] as const,
}),
'word word-name-match',
);
});
test('computeWordClass skips name-match class when disabled', () => {
const token = createToken({
surface: 'アクア',
}) as MergedToken & { isNameMatch?: boolean };
token.isNameMatch = true;
assert.equal(
computeWordClass(token, {
nameMatchEnabled: false,
enabled: true,
topX: 100,
mode: 'single',
singleColor: '#000000',
bandedColors: ['#000000', '#000000', '#000000', '#000000', '#000000'] as const,
}),
'word',
);
});
test('computeWordClass keeps known and N+1 color classes exclusive over frequency classes', () => {
const known = createToken({
isKnown: true,
@@ -229,6 +268,39 @@ test('computeWordClass keeps known and N+1 color classes exclusive over frequenc
);
});
test('applySubtitleStyle sets subtitle name-match color variable', () => {
const restoreDocument = installFakeDocument();
try {
const subtitleRoot = new FakeElement('div');
const subtitleContainer = new FakeElement('div');
const secondarySubRoot = new FakeElement('div');
const secondarySubContainer = new FakeElement('div');
const ctx = {
state: createRendererState(),
dom: {
subtitleRoot,
subtitleContainer,
secondarySubRoot,
secondarySubContainer,
},
} as never;
const renderer = createSubtitleRenderer(ctx);
renderer.applySubtitleStyle({
nameMatchColor: '#f5bde6',
} as never);
assert.equal(
(subtitleRoot.style as unknown as { values?: Map<string, string> }).values?.get(
'--subtitle-name-match-color',
),
'#f5bde6',
);
} finally {
restoreDocument();
}
});
test('computeWordClass adds frequency class for single mode when rank is within topX', () => {
const token = createToken({
surface: '猫',
@@ -598,7 +670,7 @@ test('JLPT CSS rules use underline-only styling in renderer stylesheet', () => {
assert.match(
cssText,
/#subtitleRoot\s+\.word:not\(\.word-known\):not\(\.word-n-plus-one\):not\(\.word-frequency-single\):not\(\s*\.word-frequency-band-1\s*\):not\(\.word-frequency-band-2\):not\(\.word-frequency-band-3\):not\(\.word-frequency-band-4\):not\(\s*\.word-frequency-band-5\s*\):hover\s*\{[\s\S]*?background:\s*var\(--subtitle-hover-token-background-color,\s*rgba\(54,\s*58,\s*79,\s*0\.84\)\);[\s\S]*?color:\s*var\(--subtitle-hover-token-color,\s*#f4dbd6\)\s*!important;[\s\S]*?-webkit-text-fill-color:\s*var\(--subtitle-hover-token-color,\s*#f4dbd6\)\s*!important;/,
/#subtitleRoot\s+\.word:not\(\.word-known\):not\(\.word-n-plus-one\):not\(\.word-name-match\):not\(\.word-frequency-single\):not\(\s*\.word-frequency-band-1\s*\):not\(\.word-frequency-band-2\):not\(\.word-frequency-band-3\):not\(\.word-frequency-band-4\):not\(\s*\.word-frequency-band-5\s*\):hover\s*\{[\s\S]*?background:\s*var\(--subtitle-hover-token-background-color,\s*rgba\(54,\s*58,\s*79,\s*0\.84\)\);[\s\S]*?color:\s*var\(--subtitle-hover-token-color,\s*#f4dbd6\)\s*!important;[\s\S]*?-webkit-text-fill-color:\s*var\(--subtitle-hover-token-color,\s*#f4dbd6\)\s*!important;/,
);
const coloredWordHoverBlock = extractClassBlock(cssText, '#subtitleRoot .word.word-known:hover');
@@ -636,11 +708,11 @@ test('JLPT CSS rules use underline-only styling in renderer stylesheet', () => {
assert.match(
cssText,
/\.word:is\(\.word-jlpt-n1,\s*\.word-jlpt-n2,\s*\.word-jlpt-n3,\s*\.word-jlpt-n4,\s*\.word-jlpt-n5\):not\(\s*\.word-known\s*\):not\(\.word-n-plus-one\):not\(\.word-frequency-single\):not\(\.word-frequency-band-1\):not\(\s*\.word-frequency-band-2\s*\):not\(\.word-frequency-band-3\):not\(\.word-frequency-band-4\):not\(\.word-frequency-band-5\):hover\s*\{[\s\S]*?color:\s*var\(--subtitle-hover-token-color,\s*#f4dbd6\)\s*!important;[\s\S]*?-webkit-text-fill-color:\s*var\(--subtitle-hover-token-color,\s*#f4dbd6\)\s*!important;/,
/\.word:is\(\.word-jlpt-n1,\s*\.word-jlpt-n2,\s*\.word-jlpt-n3,\s*\.word-jlpt-n4,\s*\.word-jlpt-n5\):not\(\s*\.word-known\s*\):not\(\.word-n-plus-one\):not\(\.word-name-match\):not\(\.word-frequency-single\):not\(\.word-frequency-band-1\):not\(\s*\.word-frequency-band-2\s*\):not\(\.word-frequency-band-3\):not\(\.word-frequency-band-4\):not\(\.word-frequency-band-5\):hover\s*\{[\s\S]*?color:\s*var\(--subtitle-hover-token-color,\s*#f4dbd6\)\s*!important;[\s\S]*?-webkit-text-fill-color:\s*var\(--subtitle-hover-token-color,\s*#f4dbd6\)\s*!important;/,
);
assert.match(
cssText,
/\.word:is\(\.word-jlpt-n1,\s*\.word-jlpt-n2,\s*\.word-jlpt-n3,\s*\.word-jlpt-n4,\s*\.word-jlpt-n5\):not\(\s*\.word-known\s*\):not\(\.word-n-plus-one\):not\(\.word-frequency-single\):not\(\.word-frequency-band-1\):not\(\s*\.word-frequency-band-2\s*\):not\(\.word-frequency-band-3\):not\(\.word-frequency-band-4\):not\(\.word-frequency-band-5\)::selection[\s\S]*?color:\s*var\(--subtitle-hover-token-color,\s*#f4dbd6\)\s*!important;[\s\S]*?-webkit-text-fill-color:\s*var\(--subtitle-hover-token-color,\s*#f4dbd6\)\s*!important;/,
/\.word:is\(\.word-jlpt-n1,\s*\.word-jlpt-n2,\s*\.word-jlpt-n3,\s*\.word-jlpt-n4,\s*\.word-jlpt-n5\):not\(\s*\.word-known\s*\):not\(\.word-n-plus-one\):not\(\.word-name-match\):not\(\.word-frequency-single\):not\(\.word-frequency-band-1\):not\(\s*\.word-frequency-band-2\s*\):not\(\.word-frequency-band-3\):not\(\.word-frequency-band-4\):not\(\.word-frequency-band-5\)::selection[\s\S]*?color:\s*var\(--subtitle-hover-token-color,\s*#f4dbd6\)\s*!important;[\s\S]*?-webkit-text-fill-color:\s*var\(--subtitle-hover-token-color,\s*#f4dbd6\)\s*!important;/,
);
const selectionBlock = extractClassBlock(cssText, '#subtitleRoot::selection');

View File

@@ -9,6 +9,10 @@ type FrequencyRenderSettings = {
bandedColors: [string, string, string, string, string];
};
type TokenRenderSettings = FrequencyRenderSettings & {
nameMatchEnabled: boolean;
};
export type SubtitleTokenHoverRange = {
start: number;
end: number;
@@ -75,8 +79,9 @@ const DEFAULT_FREQUENCY_RENDER_SETTINGS: FrequencyRenderSettings = {
topX: 1000,
mode: 'single',
singleColor: '#f5a97f',
bandedColors: ['#ed8796', '#f5a97f', '#f9e2af', '#a6e3a1', '#8aadf4'],
bandedColors: ['#ed8796', '#f5a97f', '#f9e2af', '#8bd5ca', '#8aadf4'],
};
const DEFAULT_NAME_MATCH_ENABLED = true;
function sanitizeFrequencyTopX(value: unknown, fallback: number): number {
if (typeof value !== 'number' || !Number.isFinite(value) || value <= 0) {
@@ -218,25 +223,23 @@ export function getJlptLevelLabelForToken(token: MergedToken): string | null {
function renderWithTokens(
root: HTMLElement,
tokens: MergedToken[],
frequencyRenderSettings?: Partial<FrequencyRenderSettings>,
tokenRenderSettings?: Partial<TokenRenderSettings>,
sourceText?: string,
preserveLineBreaks = false,
): void {
const resolvedFrequencyRenderSettings = {
const resolvedTokenRenderSettings = {
...DEFAULT_FREQUENCY_RENDER_SETTINGS,
...frequencyRenderSettings,
...tokenRenderSettings,
bandedColors: sanitizeFrequencyBandedColors(
frequencyRenderSettings?.bandedColors,
tokenRenderSettings?.bandedColors,
DEFAULT_FREQUENCY_RENDER_SETTINGS.bandedColors,
),
topX: sanitizeFrequencyTopX(
frequencyRenderSettings?.topX,
DEFAULT_FREQUENCY_RENDER_SETTINGS.topX,
),
topX: sanitizeFrequencyTopX(tokenRenderSettings?.topX, DEFAULT_FREQUENCY_RENDER_SETTINGS.topX),
singleColor: sanitizeHexColor(
frequencyRenderSettings?.singleColor,
tokenRenderSettings?.singleColor,
DEFAULT_FREQUENCY_RENDER_SETTINGS.singleColor,
),
nameMatchEnabled: tokenRenderSettings?.nameMatchEnabled ?? DEFAULT_NAME_MATCH_ENABLED,
};
const fragment = document.createDocumentFragment();
@@ -257,14 +260,14 @@ function renderWithTokens(
const token = segment.token;
const span = document.createElement('span');
span.className = computeWordClass(token, resolvedFrequencyRenderSettings);
span.className = computeWordClass(token, resolvedTokenRenderSettings);
span.textContent = token.surface;
span.dataset.tokenIndex = String(segment.tokenIndex);
if (token.reading) span.dataset.reading = token.reading;
if (token.headword) span.dataset.headword = token.headword;
const frequencyRankLabel = getFrequencyRankLabelForToken(
token,
resolvedFrequencyRenderSettings,
resolvedTokenRenderSettings,
);
if (frequencyRankLabel) {
span.dataset.frequencyRank = frequencyRankLabel;
@@ -296,14 +299,14 @@ function renderWithTokens(
}
const span = document.createElement('span');
span.className = computeWordClass(token, resolvedFrequencyRenderSettings);
span.className = computeWordClass(token, resolvedTokenRenderSettings);
span.textContent = surface;
span.dataset.tokenIndex = String(index);
if (token.reading) span.dataset.reading = token.reading;
if (token.headword) span.dataset.headword = token.headword;
const frequencyRankLabel = getFrequencyRankLabelForToken(
token,
resolvedFrequencyRenderSettings,
resolvedTokenRenderSettings,
);
if (frequencyRankLabel) {
span.dataset.frequencyRank = frequencyRankLabel;
@@ -401,26 +404,32 @@ export function buildSubtitleTokenHoverRanges(
export function computeWordClass(
token: MergedToken,
frequencySettings?: Partial<FrequencyRenderSettings>,
tokenRenderSettings?: Partial<TokenRenderSettings>,
): string {
const resolvedFrequencySettings = {
const resolvedTokenRenderSettings = {
...DEFAULT_FREQUENCY_RENDER_SETTINGS,
...frequencySettings,
...tokenRenderSettings,
bandedColors: sanitizeFrequencyBandedColors(
frequencySettings?.bandedColors,
tokenRenderSettings?.bandedColors,
DEFAULT_FREQUENCY_RENDER_SETTINGS.bandedColors,
),
topX: sanitizeFrequencyTopX(frequencySettings?.topX, DEFAULT_FREQUENCY_RENDER_SETTINGS.topX),
topX: sanitizeFrequencyTopX(
tokenRenderSettings?.topX,
DEFAULT_FREQUENCY_RENDER_SETTINGS.topX,
),
singleColor: sanitizeHexColor(
frequencySettings?.singleColor,
tokenRenderSettings?.singleColor,
DEFAULT_FREQUENCY_RENDER_SETTINGS.singleColor,
),
nameMatchEnabled: tokenRenderSettings?.nameMatchEnabled ?? DEFAULT_NAME_MATCH_ENABLED,
};
const classes = ['word'];
if (token.isNPlusOneTarget) {
classes.push('word-n-plus-one');
} else if (resolvedTokenRenderSettings.nameMatchEnabled && token.isNameMatch) {
classes.push('word-name-match');
} else if (token.isKnown) {
classes.push('word-known');
}
@@ -429,8 +438,12 @@ export function computeWordClass(
classes.push(`word-jlpt-${token.jlptLevel.toLowerCase()}`);
}
if (!token.isKnown && !token.isNPlusOneTarget) {
const frequencyClass = getFrequencyDictionaryClass(token, resolvedFrequencySettings);
if (
!token.isKnown &&
!token.isNPlusOneTarget &&
!(resolvedTokenRenderSettings.nameMatchEnabled && token.isNameMatch)
) {
const frequencyClass = getFrequencyDictionaryClass(token, resolvedTokenRenderSettings);
if (frequencyClass) {
classes.push(frequencyClass);
}
@@ -494,7 +507,7 @@ export function createSubtitleRenderer(ctx: RendererContext) {
renderWithTokens(
ctx.dom.subtitleRoot,
tokens,
getFrequencyRenderSettings(),
getTokenRenderSettings(),
text,
ctx.state.preserveSubtitleLineBreaks,
);
@@ -503,8 +516,9 @@ export function createSubtitleRenderer(ctx: RendererContext) {
renderCharacterLevel(ctx.dom.subtitleRoot, normalized);
}
function getFrequencyRenderSettings(): Partial<FrequencyRenderSettings> {
function getTokenRenderSettings(): Partial<TokenRenderSettings> {
return {
nameMatchEnabled: ctx.state.nameMatchEnabled,
enabled: ctx.state.frequencyDictionaryEnabled,
topX: ctx.state.frequencyDictionaryTopX,
mode: ctx.state.frequencyDictionaryMode,
@@ -577,6 +591,8 @@ export function createSubtitleRenderer(ctx: RendererContext) {
if (style.fontStyle) ctx.dom.subtitleRoot.style.fontStyle = style.fontStyle;
const knownWordColor = style.knownWordColor ?? ctx.state.knownWordColor ?? '#a6da95';
const nPlusOneColor = style.nPlusOneColor ?? ctx.state.nPlusOneColor ?? '#c6a0f6';
const nameMatchEnabled = style.nameMatchEnabled ?? ctx.state.nameMatchEnabled ?? true;
const nameMatchColor = style.nameMatchColor ?? ctx.state.nameMatchColor ?? '#f5bde6';
const hoverTokenColor = sanitizeSubtitleHoverTokenColor(style.hoverTokenColor);
const hoverTokenBackgroundColor = sanitizeSubtitleHoverTokenBackgroundColor(
style.hoverTokenBackgroundColor,
@@ -600,8 +616,11 @@ export function createSubtitleRenderer(ctx: RendererContext) {
ctx.state.knownWordColor = knownWordColor;
ctx.state.nPlusOneColor = nPlusOneColor;
ctx.state.nameMatchEnabled = nameMatchEnabled;
ctx.state.nameMatchColor = nameMatchColor;
ctx.dom.subtitleRoot.style.setProperty('--subtitle-known-word-color', knownWordColor);
ctx.dom.subtitleRoot.style.setProperty('--subtitle-n-plus-one-color', nPlusOneColor);
ctx.dom.subtitleRoot.style.setProperty('--subtitle-name-match-color', nameMatchColor);
ctx.dom.subtitleRoot.style.setProperty('--subtitle-hover-token-color', hoverTokenColor);
ctx.dom.subtitleRoot.style.setProperty(
'--subtitle-hover-token-background-color',

View File

@@ -1,79 +0,0 @@
/** Supported subtitle-synchronization backends. */
export type SubsyncEngine = 'alass' | 'ffsubsync';

/** Result of invoking one external sync command. */
export interface SubsyncCommandResult {
  /** Whether the command is considered successful. */
  ok: boolean;
  /** Process exit code; null presumably when no exit code was produced (e.g. spawn failure) — confirm against the runCommand implementation. */
  code: number | null;
  /** Captured standard error output. */
  stderr: string;
  /** Captured standard output. */
  stdout: string;
  /** Optional error description — NOTE(review): population rules are not visible in this module. */
  error?: string;
}

/** Everything an engine needs to run one sync job; process I/O is injected by the caller. */
export interface SubsyncEngineExecutionContext {
  /** Reference file passed to alass as its first argument. */
  referenceFilePath: string;
  /** Source video path; used by ffsubsync as the timing reference. */
  videoPath: string;
  /** Out-of-sync subtitle file to be corrected. */
  inputSubtitlePath: string;
  /** Destination path for the synced subtitle file. */
  outputPath: string;
  /** Audio stream index sent to ffsubsync as `--reference-stream 0:<index>`; when null the flag is omitted. */
  audioStreamIndex: number | null;
  /** Maps a configured executable path to the path actually invoked; commandName is presumably the fallback binary name — confirm with the resolver. */
  resolveExecutablePath: (configuredPath: string, commandName: string) => string;
  /** Configured executable locations for each engine. */
  resolvedPaths: {
    alassPath: string;
    ffsubsyncPath: string;
  };
  /** Spawns the command with the given arguments and reports its outcome. */
  runCommand: (command: string, args: string[]) => Promise<SubsyncCommandResult>;
}

/** A pluggable sync engine implementation. */
export interface SubsyncEngineProvider {
  engine: SubsyncEngine;
  execute: (context: SubsyncEngineExecutionContext) => Promise<SubsyncCommandResult>;
}
type SubsyncEngineProviderFactory = () => SubsyncEngineProvider;

// Lazily-instantiated provider factories, keyed by engine identifier.
const subsyncEngineProviderFactories = new Map<SubsyncEngine, SubsyncEngineProviderFactory>();

/**
 * Registers a provider factory for an engine. First registration wins;
 * later attempts for the same engine are silently ignored.
 */
export function registerSubsyncEngineProvider(
  engine: SubsyncEngine,
  factory: SubsyncEngineProviderFactory,
): void {
  if (!subsyncEngineProviderFactories.has(engine)) {
    subsyncEngineProviderFactories.set(engine, factory);
  }
}

/** Instantiates the provider registered for `engine`, or null when none exists. */
export function createSubsyncEngineProvider(engine: SubsyncEngine): SubsyncEngineProvider | null {
  const factory = subsyncEngineProviderFactories.get(engine);
  return factory ? factory() : null;
}

/** Registers the built-in alass and ffsubsync providers. */
function registerDefaultSubsyncEngineProviders(): void {
  registerSubsyncEngineProvider('alass', () => ({
    engine: 'alass',
    execute: async (context: SubsyncEngineExecutionContext) => {
      // alass CLI shape: <reference> <input> <output>
      const executable = context.resolveExecutablePath(context.resolvedPaths.alassPath, 'alass');
      const args = [context.referenceFilePath, context.inputSubtitlePath, context.outputPath];
      return context.runCommand(executable, args);
    },
  }));
  registerSubsyncEngineProvider('ffsubsync', () => ({
    engine: 'ffsubsync',
    execute: async (context: SubsyncEngineExecutionContext) => {
      // ffsubsync CLI shape: <video> -i <input> -o <output> [--reference-stream 0:<idx>]
      const executable = context.resolveExecutablePath(
        context.resolvedPaths.ffsubsyncPath,
        'ffsubsync',
      );
      const args = [context.videoPath, '-i', context.inputSubtitlePath, '-o', context.outputPath];
      if (context.audioStreamIndex !== null) {
        args.push('--reference-stream', `0:${context.audioStreamIndex}`);
      }
      return context.runCommand(executable, args);
    },
  }));
}

registerDefaultSubsyncEngineProviders();

View File

@@ -1,43 +0,0 @@
import { TokenMergerProvider } from '../token-mergers';
import { TokenizerProvider } from '../tokenizers';
import { SubtitleData } from '../types';
import { normalizeDisplayText, normalizeTokenizerInput } from './stages/normalize';
import { tokenizeStage } from './stages/tokenize';
import { mergeStage } from './stages/merge';
export interface SubtitlePipelineDeps {
getTokenizer: () => TokenizerProvider | null;
getTokenMerger: () => TokenMergerProvider | null;
}
export class SubtitlePipeline {
private readonly deps: SubtitlePipelineDeps;
constructor(deps: SubtitlePipelineDeps) {
this.deps = deps;
}
async process(text: string): Promise<SubtitleData> {
if (!text) {
return { text, tokens: null };
}
const displayText = normalizeDisplayText(text);
if (!displayText) {
return { text, tokens: null };
}
const tokenizeText = normalizeTokenizerInput(displayText);
try {
const tokens = await tokenizeStage(this.deps.getTokenizer(), tokenizeText);
const mergedTokens = mergeStage(this.deps.getTokenMerger(), tokens);
if (!mergedTokens || mergedTokens.length === 0) {
return { text: displayText, tokens: null };
}
return { text: displayText, tokens: mergedTokens };
} catch {
return { text: displayText, tokens: null };
}
}
}

View File

@@ -1,12 +0,0 @@
import { TokenMergerProvider } from '../../token-mergers';
import { MergedToken, Token } from '../../types';
/**
 * Merge stage of the subtitle pipeline: delegates to the configured
 * token-merger provider.
 *
 * @param mergerProvider - Provider to merge with; null when merging is unavailable.
 * @param tokens - Tokens from the tokenize stage; null or empty short-circuits.
 * @returns The merged tokens, or null when there is nothing to merge or no provider.
 */
export function mergeStage(
  mergerProvider: TokenMergerProvider | null,
  tokens: Token[] | null,
): MergedToken[] | null {
  if (mergerProvider && tokens && tokens.length > 0) {
    return mergerProvider.merge(tokens);
  }
  return null;
}

View File

@@ -1,10 +0,0 @@
import test from 'node:test';
import assert from 'node:assert/strict';
import { normalizeTokenizerInput } from './normalize';
test('normalizeTokenizerInput collapses zero-width separators between Japanese segments', () => {
  // \u200b (zero-width space) and the newline should each collapse to a single
  // ASCII space. Normalization only replaces/collapses whitespace-like
  // characters — it never inserts new text, so the previous expected value
  // (which ended in an exclamation mark absent from the input) could not match.
  const input = 'キリキリと\u200bかかってこい\nこのヘナチョコ冒険者どもめが';
  const normalized = normalizeTokenizerInput(input);
  assert.equal(normalized, 'キリキリと かかってこい このヘナチョコ冒険者どもめが');
});

View File

@@ -1,13 +0,0 @@
/**
 * Normalizes raw subtitle text for display: CRLF pairs and the ASS/SSA
 * literal line-break escapes (`\N`, `\n`) become real newlines, and
 * surrounding whitespace is trimmed.
 */
export function normalizeDisplayText(text: string): string {
  let result = text.replace(/\r\n/g, '\n');
  result = result.replace(/\\N/g, '\n');
  result = result.replace(/\\n/g, '\n');
  return result.trim();
}
// Zero-width space, word joiner, and zero-width no-break space (BOM).
const INVISIBLE_SEPARATOR_PATTERN = /[\u200b\u2060\ufeff]/g;

/**
 * Prepares display text for the tokenizer: newlines and invisible separator
 * characters become ordinary spaces, runs of whitespace collapse to a single
 * space, and the result is trimmed.
 */
export function normalizeTokenizerInput(displayText: string): string {
  const flattened = displayText.replace(/\n/g, ' ').replace(INVISIBLE_SEPARATOR_PATTERN, ' ');
  return flattened.replace(/\s+/g, ' ').trim();
}

View File

@@ -1,12 +0,0 @@
import { TokenizerProvider } from '../../tokenizers';
import { Token } from '../../types';
/**
 * Tokenize stage of the subtitle pipeline: delegates to the configured
 * tokenizer provider.
 *
 * @param tokenizerProvider - Provider to tokenize with; null when unavailable.
 * @param input - Normalized subtitle text; an empty string short-circuits.
 * @returns The provider's tokens, or null when there is no provider or no input.
 */
export async function tokenizeStage(
  tokenizerProvider: TokenizerProvider | null,
  input: string,
): Promise<Token[] | null> {
  if (tokenizerProvider && input) {
    return tokenizerProvider.tokenize(input);
  }
  return null;
}

View File

@@ -1,27 +0,0 @@
import { mergeTokens as defaultMergeTokens } from '../token-merger';
import { MergedToken, Token } from '../types';
/** A named strategy for merging raw tokenizer output into display tokens. */
export interface TokenMergerProvider {
  id: string;
  merge: (tokens: Token[]) => MergedToken[];
}

type TokenMergerProviderFactory = () => TokenMergerProvider;

// Lazily-instantiated merger factories, keyed by provider id.
const tokenMergerProviderFactories = new Map<string, TokenMergerProviderFactory>();

/**
 * Registers a merger factory under `id`. First registration wins; later
 * attempts for the same id are silently ignored.
 */
export function registerTokenMergerProvider(id: string, factory: TokenMergerProviderFactory): void {
  if (!tokenMergerProviderFactories.has(id)) {
    tokenMergerProviderFactories.set(id, factory);
  }
}

/** Registers the built-in merger backed by the default `mergeTokens` implementation. */
function registerDefaultTokenMergerProviders(): void {
  registerTokenMergerProvider('default', () => ({
    id: 'default',
    merge: (tokens: Token[]) => defaultMergeTokens(tokens),
  }));
}

registerDefaultTokenMergerProviders();

View File

@@ -1,36 +0,0 @@
import { MecabTokenizer } from '../mecab-tokenizer';
import { MecabStatus, Token } from '../types';
/** Availability-aware wrapper around a concrete tokenizer implementation. */
export interface TokenizerProvider {
  id: string;
  checkAvailability: () => Promise<boolean>;
  tokenize: (text: string) => Promise<Token[] | null>;
  getStatus: () => MecabStatus;
  setEnabled: (enabled: boolean) => void;
}

type TokenizerProviderFactory = () => TokenizerProvider;

// Lazily-instantiated tokenizer factories, keyed by provider id.
const tokenizerProviderFactories = new Map<string, TokenizerProviderFactory>();

/**
 * Registers a tokenizer factory under `id`. First registration wins; later
 * attempts for the same id are silently ignored.
 */
export function registerTokenizerProvider(id: string, factory: TokenizerProviderFactory): void {
  if (!tokenizerProviderFactories.has(id)) {
    tokenizerProviderFactories.set(id, factory);
  }
}

/** Registers the built-in MeCab-backed tokenizer provider. */
function registerDefaultTokenizerProviders(): void {
  registerTokenizerProvider('mecab', () => {
    // One MecabTokenizer instance per provider instantiation; all provider
    // methods forward to it.
    const mecab = new MecabTokenizer();
    return {
      id: 'mecab',
      checkAvailability: () => mecab.checkAvailability(),
      tokenize: (text: string) => mecab.tokenize(text),
      getStatus: () => mecab.getStatus(),
      setEnabled: (enabled: boolean) => mecab.setEnabled(enabled),
    };
  });
}

registerDefaultTokenizerProviders();

View File

@@ -1,101 +0,0 @@
import axios from 'axios';
/** Inputs for one translation call against an OpenAI-compatible chat API. */
export interface TranslationRequest {
  /** Source text to translate. */
  sentence: string;
  /** Bearer token sent in the Authorization header. */
  apiKey: string;
  /** API base URL; a trailing `/v1` is optional (the provider normalizes it). */
  baseUrl: string;
  /** Chat model identifier to request. */
  model: string;
  /** Human-readable target language, interpolated into the user prompt. */
  targetLanguage: string;
  /** System prompt that frames the translation request. */
  systemPrompt: string;
  /** Request timeout in milliseconds; the provider falls back to 15000 when omitted. */
  timeoutMs?: number;
}

/** A named translation backend. */
export interface TranslationProvider {
  id: string;
  /** Translates `request.sentence`; resolves to null when no text comes back. */
  translate: (request: TranslationRequest) => Promise<string | null>;
}
type TranslationProviderFactory = () => TranslationProvider;

// Lazily-instantiated translation provider factories, keyed by provider id.
const translationProviderFactories = new Map<string, TranslationProviderFactory>();

/**
 * Registers a translation provider factory under `id`. First registration
 * wins; later attempts for the same id are silently ignored.
 */
export function registerTranslationProvider(id: string, factory: TranslationProviderFactory): void {
  if (!translationProviderFactories.has(id)) {
    translationProviderFactories.set(id, factory);
  }
}

/** Instantiates the provider registered under `id`, or null when none exists. */
export function createTranslationProvider(id = 'openai-compatible'): TranslationProvider | null {
  const factory = translationProviderFactories.get(id);
  return factory ? factory() : null;
}
/**
 * Extracts plain text from an OpenAI-style message `content` field, which may
 * be a bare string or an array of content parts. Only parts shaped like
 * `{ type: 'text', text: string }` contribute; everything else is ignored.
 */
function extractAiText(content: unknown): string {
  if (typeof content === 'string') {
    return content.trim();
  }
  if (!Array.isArray(content)) {
    return '';
  }
  const isTextPart = (item: unknown): item is { type: 'text'; text: string } =>
    typeof item === 'object' &&
    item !== null &&
    (item as { type?: unknown }).type === 'text' &&
    typeof (item as { text?: unknown }).text === 'string';
  return content
    .filter(isTextPart)
    .map((part) => part.text)
    .join('')
    .trim();
}
/**
 * Normalizes an OpenAI-compatible base URL: trims whitespace, strips trailing
 * slashes, and appends `/v1` unless the URL already ends with it
 * (case-insensitive).
 */
function normalizeOpenAiBaseUrl(baseUrl: string): string {
  const stripped = baseUrl.trim().replace(/\/+$/, '');
  return /\/v1$/i.test(stripped) ? stripped : `${stripped}/v1`;
}
/**
 * Registers the built-in 'openai-compatible' provider, which translates by
 * POSTing to a Chat Completions endpoint (`<base>/v1/chat/completions`).
 */
function registerDefaultTranslationProviders(): void {
  registerTranslationProvider('openai-compatible', () => ({
    id: 'openai-compatible',
    translate: async (request: TranslationRequest): Promise<string | null> => {
      const response = await axios.post(
        `${normalizeOpenAiBaseUrl(request.baseUrl)}/chat/completions`,
        {
          model: request.model,
          // Request deterministic output.
          temperature: 0,
          messages: [
            { role: 'system', content: request.systemPrompt },
            {
              role: 'user',
              content: `Translate this text to ${request.targetLanguage}:\n\n${request.sentence}`,
            },
          ],
        },
        {
          headers: {
            Authorization: `Bearer ${request.apiKey}`,
            'Content-Type': 'application/json',
          },
          // Default to a 15-second timeout when the caller does not set one.
          timeout: request.timeoutMs ?? 15000,
        },
      );
      // The message content may be a string or an array of content parts;
      // extractAiText handles both shapes.
      const content = (response.data as { choices?: unknown[] })?.choices?.[0] as
        | { message?: { content?: unknown } }
        | undefined;
      const translated = extractAiText(content?.message?.content);
      // Treat an empty extraction as "no translation available".
      return translated || null;
    },
  }));
}

registerDefaultTranslationProviders();

View File

@@ -54,6 +54,7 @@ export interface MergedToken {
isMerged: boolean;
isKnown: boolean;
isNPlusOneTarget: boolean;
isNameMatch?: boolean;
jlptLevel?: JlptLevel;
frequencyRank?: number;
}
@@ -293,6 +294,8 @@ export interface SubtitleStyleConfig {
autoPauseVideoOnYomitanPopup?: boolean;
hoverTokenColor?: string;
hoverTokenBackgroundColor?: string;
nameMatchEnabled?: boolean;
nameMatchColor?: string;
fontFamily?: string;
fontSize?: number;
fontColor?: string;