feat: streamline Kiku duplicate grouping and popup flow (#38)

This commit is contained in:
2026-04-01 00:04:03 -07:00
parent ec64eebb80
commit 006ff22d42
29 changed files with 1218 additions and 34 deletions

View File

@@ -109,7 +109,7 @@ Browse sibling episode files and the active mpv queue in one overlay modal. Open
## Requirements ## Requirements
| | Required | Optional | | | Required | Optional |
| -------------- | --------------------------------------- | -------------------------------------- | | -------------- | --------------------------------------- | ---------------------------------------------------------- |
| **Player** | [`mpv`](https://mpv.io) with IPC socket | — | | **Player** | [`mpv`](https://mpv.io) with IPC socket | — |
| **Processing** | `ffmpeg`, `mecab` + `mecab-ipadic` | `guessit` (AniSkip), `alass` / `ffsubsync` (subtitle sync) | | **Processing** | `ffmpeg`, `mecab` + `mecab-ipadic` | `guessit` (AniSkip), `alass` / `ffsubsync` (subtitle sync) |
| **Media** | — | `yt-dlp`, `chafa`, `ffmpegthumbnailer` | | **Media** | — | `yt-dlp`, `chafa`, `ffmpegthumbnailer` |
@@ -236,8 +236,6 @@ subminer stats -b # stats daemon in background
subminer stats -s # stop background stats daemon subminer stats -s # stop background stats daemon
``` ```
---
## Documentation ## Documentation
Full guides on configuration, Anki setup, Jellyfin, immersion tracking, and more: **[docs.subminer.moe](https://docs.subminer.moe)** Full guides on configuration, Anki setup, Jellyfin, immersion tracking, and more: **[docs.subminer.moe](https://docs.subminer.moe)**

View File

@@ -0,0 +1,39 @@
---
id: TASK-263
title: Reuse pre-add duplicate IDs for generic Kiku field grouping
status: Done
assignee: []
created_date: '2026-03-31 20:44'
updated_date: '2026-03-31 20:48'
labels:
- anki
- kiku
dependencies: []
priority: high
---
## Description
<!-- SECTION:DESCRIPTION:BEGIN -->
Avoid the extra post-add duplicate lookup on the generic sentence-card creation path by capturing duplicate note IDs before add and reusing that result for Kiku field grouping. Keep Yomitan semantics aligned where practical so duplicate selection is consistent across mining paths.
<!-- SECTION:DESCRIPTION:END -->
## Acceptance Criteria
<!-- AC:BEGIN -->
- [x] #1 Generic sentence-card creation captures duplicate note IDs before add and reuses them for Kiku field grouping instead of running the existing post-add duplicate finder
- [x] #2 Duplicate selection remains deterministic when multiple matching notes exist
- [x] #3 Regression tests cover the generic path duplicate reuse behavior and preserve existing non-Kiku behavior
- [x] #4 Internal docs/config comments are updated if the behavior or operator-facing semantics changed
<!-- AC:END -->
## Implementation Notes
<!-- SECTION:NOTES:BEGIN -->
No docs update was required because this is internal duplicate-selection plumbing and does not change user-facing config surface.
<!-- SECTION:NOTES:END -->
## Final Summary
<!-- SECTION:FINAL_SUMMARY:BEGIN -->
Generic sentence-card creation now captures exact duplicate note IDs before add when Kiku field grouping is enabled and stores that context by created note ID. Manual field grouping reuses the tracked duplicate IDs first and deterministically picks the most recent matching note, falling back to the legacy duplicate finder only when no tracked context exists. Verified with bun test src/anki-integration/duplicate.test.ts src/anki-integration/card-creation.test.ts src/anki-integration/field-grouping.test.ts and bun run typecheck.
<!-- SECTION:FINAL_SUMMARY:END -->

View File

@@ -0,0 +1,41 @@
---
id: TASK-263.1
title: Reuse Yomitan popup duplicate IDs in SubMiner bridge
status: Done
assignee: []
created_date: '2026-03-31 22:15'
updated_date: '2026-03-31 22:21'
labels:
- anki
- kiku
- yomitan
dependencies: []
parent_task_id: TASK-263
priority: high
---
## Description
<!-- SECTION:DESCRIPTION:BEGIN -->
Thread Yomitan popup/search duplicate note IDs through the existing SubMiner bridge so Kiku/manual grouping can reuse the same duplicate context that already drives the Add duplicate button. Implement and test against the vendored Yomitan copy first; do not rely on upstreamed fork changes yet.
<!-- SECTION:DESCRIPTION:END -->
## Acceptance Criteria
<!-- AC:BEGIN -->
- [x] #1 Vendored Yomitan bridge returns duplicate note IDs for popup/search mining when available
- [x] #2 SubMiner consumes the bridged duplicate IDs and prefers them for Kiku/manual grouping on the Yomitan mining path
- [x] #3 Regression tests cover the popup/search bridge payload and duplicate-id reuse behavior
- [x] #4 No commit is made for vendored Yomitan-only changes in this repo state
<!-- AC:END -->
## Implementation Notes
<!-- SECTION:NOTES:BEGIN -->
Vendored files changed locally for validation only: vendor/subminer-yomitan/ext/js/display/display-anki.js, vendor/subminer-yomitan/ext/js/comm/api.js, vendor/subminer-yomitan/ext/js/comm/anki-connect.js, vendor/subminer-yomitan/ext/js/background/backend.js. Do not commit those vendor changes in this repo; port them to the fork instead.
<!-- SECTION:NOTES:END -->
## Final Summary
<!-- SECTION:FINAL_SUMMARY:BEGIN -->
Vendored Yomitan popup/search mining now precomputes duplicate note IDs, sends them to the SubMiner Anki proxy as private addNote metadata, and still returns note/duplicate data through the parser bridge. The proxy strips the private metadata before forwarding to upstream AnkiConnect, associates the duplicate IDs with the created note before auto-enrichment begins, and SubMiner also records the bridge result as a secondary cache path. Verified with bun test src/anki-integration/duplicate.test.ts src/anki-integration/card-creation.test.ts src/anki-integration/field-grouping.test.ts src/anki-integration/anki-connect-proxy.test.ts src/core/services/tokenizer/yomitan-parser-runtime.test.ts and bun run typecheck.
<!-- SECTION:FINAL_SUMMARY:END -->

View File

@@ -0,0 +1,43 @@
---
id: TASK-263.2
title: >-
Keep Yomitan popup responsive during background add and pause/close before
Kiku modal
status: Done
assignee: []
created_date: '2026-04-01 00:42'
updated_date: '2026-04-01 02:35'
labels:
- anki
- yomitan
- kiku
- ux
dependencies: []
parent_task_id: TASK-263
priority: high
---
## Description
<!-- SECTION:DESCRIPTION:BEGIN -->
Make Yomitan popup add run in background without blocking popup responsiveness. Before opening Kiku field-grouping modal, pause MPV and close the Yomitan popup/parser window if open.
<!-- SECTION:DESCRIPTION:END -->
## Definition of Done
<!-- DOD:BEGIN -->
- [x] #1 Popup save path returns immediately and prevents duplicate submits
- [x] #2 Field-grouping modal request pauses MPV and closes Yomitan popup window first
- [x] #3 Regression tests cover async save dispatch and main-side pause/close hook
<!-- DOD:END -->
## Implementation Notes
<!-- SECTION:NOTES:BEGIN -->
2026-03-31: Removed the custom pending label/gray save-button presentation from vendored Yomitan. Background add still runs asynchronously with the internal pending-save guard, so duplicate clicks are ignored while the button keeps its stock appearance.
<!-- SECTION:NOTES:END -->
## Final Summary
<!-- SECTION:FINAL_SUMMARY:BEGIN -->
Yomitan popup save dispatches note creation/add in the background with an internal pending-save guard so repeated clicks are ignored without blocking the popup. Before opening the Kiku field-grouping modal, the renderer now closes the visible lookup popup and pauses MPV. Follow-up UX polish removed the custom pending label/gray styling so the save button keeps Yomitan's stock presentation while the background action runs.
<!-- SECTION:FINAL_SUMMARY:END -->

View File

@@ -0,0 +1,26 @@
---
id: TASK-264
title: Replace axios with native fetch across the project
status: To Do
assignee: []
created_date: '2026-04-01 00:44'
labels: []
dependencies: []
priority: medium
---
## Description
<!-- SECTION:DESCRIPTION:BEGIN -->
Remove axios from the codebase and migrate all project HTTP requests to the platform fetch API, preserving existing request behavior and error handling where applicable.
<!-- SECTION:DESCRIPTION:END -->
## Acceptance Criteria
<!-- AC:BEGIN -->
- [ ] #1 No production code paths import or depend on axios.
- [ ] #2 All existing HTTP requests use fetch or a project-local abstraction built on fetch.
- [ ] #3 Request behavior remains functionally equivalent for headers, query params, bodies, status handling, and abort/error cases that are currently supported.
- [ ] #4 Tests are updated or added to cover the migrated request flows.
- [ ] #5 Documentation is updated if any request semantics or setup steps change.
- [ ] #6 axios is removed from project dependencies if it is no longer needed.
<!-- AC:END -->

View File

@@ -0,0 +1,53 @@
---
id: TASK-265
title: Add remote backend for immersion tracking and stats (prefer Postgres)
status: To Do
assignee: []
created_date: '2026-04-01 00:47'
labels: []
dependencies: []
references:
- >-
/home/sudacode/projects/japanese/SubMiner/src/core/services/immersion-tracker-service.ts
- >-
/home/sudacode/projects/japanese/SubMiner/src/core/services/immersion-tracker/storage.ts
- >-
/home/sudacode/projects/japanese/SubMiner/src/core/services/immersion-tracker/sqlite.ts
- /home/sudacode/projects/japanese/SubMiner/src/stats-daemon-runner.ts
- /home/sudacode/projects/japanese/SubMiner/src/core/services/stats-server.ts
- /home/sudacode/projects/japanese/SubMiner/src/main/boot/services.ts
- /home/sudacode/projects/japanese/SubMiner/package.json
documentation:
- /home/sudacode/projects/japanese/SubMiner/docs/architecture/README.md
- >-
/home/sudacode/projects/japanese/SubMiner/docs/architecture/stats-trends-data-flow.md
- /home/sudacode/projects/japanese/SubMiner/README.md
- /home/sudacode/projects/japanese/SubMiner/config.example.jsonc
priority: high
---
## Description
<!-- SECTION:DESCRIPTION:BEGIN -->
Enable immersion tracking/stats to use a remote authoritative backend so multiple devices can share the same history.
Current state: `ImmersionTrackerService` opens a local `immersion.sqlite` file from the app data/config path, `stats-daemon-runner` points at that same local file, and `config.example.jsonc` only exposes `immersionTracking.dbPath` for a local path override. The stats API/dashboard reads from the same tracker service and assumes the local database is the source of truth.
Goal: add a remote backend option that avoids shared filesystem/database-file syncing between devices. Do not use SSH/rsync/shared network filesystem as the primary sync strategy for live multi-device use.
Backend choice: prefer Postgres if it can be integrated without a broad new dependency surface or destabilizing the current runtime; otherwise use the least invasive remote backend that can be shipped with the current stack and document the tradeoff clearly. Preserve the current local SQLite mode as the default/offline fallback if possible.
This ticket should cover the full product/architecture change: configuration, storage access, stats reads, startup/error handling, migration/bootstrap from existing local data, tests, and docs.
<!-- SECTION:DESCRIPTION:END -->
## Acceptance Criteria
<!-- AC:BEGIN -->
- [ ] #1 The app can be configured to use a remote authoritative backend for immersion tracking instead of only a local `immersion.sqlite` file.
- [ ] #2 The chosen backend persists tracker writes and serves the existing stats read models across app restarts.
- [ ] #3 Two devices can point at the same remote backend without relying on a shared filesystem or raw SQLite file sync.
- [ ] #4 Local SQLite remains supported as the default or fallback mode for offline use.
- [ ] #5 If the remote backend is unavailable or misconfigured, startup/write paths fail with actionable errors instead of silent data loss.
- [ ] #6 A migration or bootstrap path exists to move existing local immersion data into the remote backend or seed a new device from it.
- [ ] #7 Config/examples/docs explain the backend choice, required connection/setup details, and any security/network assumptions.
- [ ] #8 Tests cover backend selection plus at least one representative write/read path against the remote backend.
<!-- AC:END -->

View File

@@ -0,0 +1,34 @@
---
id: TASK-266
title: Preserve paused state for configured subtitle-jump keybindings
status: Done
assignee: []
created_date: '2026-04-01 03:19'
updated_date: '2026-04-01 03:19'
labels:
- renderer
- mpv
- keybindings
- regression
dependencies: []
priority: high
---
## Description
<!-- SECTION:DESCRIPTION:BEGIN -->
Regression: configured overlay keybindings that forward raw mpv subtitle-jump commands (for example previous-subtitle on H) can resume playback when invoked while paused. Keyboard-driven edge jumps already preserve paused state; configured keybindings should match that behavior.
<!-- SECTION:DESCRIPTION:END -->
## Acceptance Criteria
<!-- AC:BEGIN -->
- [x] #1 Configured subtitle-jump keybindings preserve paused playback state after backward seek
- [x] #2 Existing keyboard-driven subtitle navigation behavior remains unchanged
- [x] #3 Regression test covers paused configured subtitle-jump keybinding handling
<!-- AC:END -->
## Final Summary
<!-- SECTION:FINAL_SUMMARY:BEGIN -->
Configured overlay keybindings that forward `sub-seek` commands now re-check paused state and reapply pause after the seek when playback was already paused. This aligns raw configured subtitle-jump keybindings with the existing keyboard-driven edge-jump behavior and adds regression coverage for the paused backward-seek case.
<!-- SECTION:FINAL_SUMMARY:END -->

View File

@@ -0,0 +1,34 @@
---
id: TASK-267
title: Port validated Yomitan popup changes to fork and resync submodule
status: Done
assignee: []
created_date: '2026-04-01 03:30'
updated_date: '2026-04-01 03:33'
labels:
- yomitan
- submodule
- git
- integration
dependencies: []
priority: high
---
## Description
<!-- SECTION:DESCRIPTION:BEGIN -->
Take the locally validated Yomitan popup/bridge changes from the vendored copy, apply them to the standalone `../subminer-yomitan` fork, verify the fork, push the fork commit, then reset the vendored working tree in SubMiner and update the submodule pointer to the pushed fork commit.
<!-- SECTION:DESCRIPTION:END -->
## Acceptance Criteria
<!-- AC:BEGIN -->
- [x] #1 Standalone `../subminer-yomitan` contains the validated popup/bridge changes and passes the relevant regression test
- [x] #2 The fork commit is pushed to its configured remote branch
- [x] #3 SubMiner vendored Yomitan working tree is reset and the submodule pointer is updated to the pushed fork commit
<!-- AC:END -->
## Final Summary
<!-- SECTION:FINAL_SUMMARY:BEGIN -->
Applied the validated popup/bridge changes from the vendored Yomitan copy into `../subminer-yomitan`, added the focused async-save regression test there, installed fork deps, and verified with `npx vitest run test/display-anki-save.test.js`. Committed the fork changes as `feat: preserve async popup save state and duplicate metadata`, rebased onto the updated remote `main`, and pushed commit `69620abc` to `origin/main`. Then reset the vendored submodule working tree in SubMiner, checked it out at `69620abc`, and left the superproject with the submodule pointer updated from `3c9ee577` to `69620abc`.
<!-- SECTION:FINAL_SUMMARY:END -->

View File

@@ -0,0 +1,44 @@
---
id: TASK-268
title: 'Address CodeRabbit review action items for PR #38'
status: Done
assignee: []
created_date: '2026-04-01 05:35'
updated_date: '2026-04-01 06:07'
labels:
- pr-review
- coderabbit
dependencies: []
references:
- 'https://github.com/ksyasuda/SubMiner/pull/38'
priority: medium
---
## Description
<!-- SECTION:DESCRIPTION:BEGIN -->
Review unresolved CodeRabbit feedback on PR #38 and implement the actionable fixes without regressing duplicate grouping or popup behavior.
<!-- SECTION:DESCRIPTION:END -->
## Acceptance Criteria
<!-- AC:BEGIN -->
- [x] #1 All unresolved actionable CodeRabbit review comments on PR #38 are triaged and either fixed in code or explicitly identified as non-actionable or ambiguous.
- [x] #2 Code changes preserve duplicate grouping and popup flow behavior covered by existing or added regression tests.
- [x] #3 Relevant local verification for the affected areas passes.
<!-- AC:END -->
## Implementation Notes
<!-- SECTION:NOTES:BEGIN -->
2026-04-01: Reopened for follow-up CodeRabbit round after commit 233bde58. Remaining actionable items: guard maxMatches <= 0 in duplicate exact-match helper and strengthen the duplicate tracking test fixture to prove deduplication as well as sorting.
2026-04-01: Follow-up round addressed locally. Added guard for maxMatches <= 0 in duplicate exact-match scanning and strengthened the pre-add duplicate tracking test fixture to prove deduplication as well as sorting.
<!-- SECTION:NOTES:END -->
## Final Summary
<!-- SECTION:FINAL_SUMMARY:BEGIN -->
Addressed all unresolved actionable CodeRabbit comments on PR #38. Fixed duplicate tracking so empty duplicate lists are not persisted after sentence-card creation, sanitized Yomitan add-note noteId values to accept only positive integers, preserved paused playback for configured subtitle-seek keybindings when pause state is unknown, and short-circuited duplicate exact-match scanning for single-result lookups. Added regression tests for each case and verified with `bun test` on the affected suites plus `bun run typecheck`, `bun run test:fast`, `bun run test:env`, `bun run build`, and `bun run test:smoke:dist`.
Follow-up CodeRabbit round addressed locally: `findExactDuplicateNoteIds()` now returns early when `maxMatches <= 0`, and the sentence-card duplicate tracking regression test now uses a repeated duplicate ID to assert deduplication plus sorting. Re-verified with targeted duplicate/card tests, `bun run typecheck`, and `bun run test:fast`.
<!-- SECTION:FINAL_SUMMARY:END -->

View File

@@ -0,0 +1,6 @@
type: fixed
area: overlay
- Fixed Kiku duplicate grouping to reuse duplicate note IDs from both generic sentence-card creation and Yomitan popup mining instead of running extra duplicate scans after add.
- Fixed the Yomitan popup mining flow to add cards in the background while keeping the stock popup progress feedback, then pause playback and close the lookup popup before the Kiku merge modal opens.
- Fixed configured subtitle-jump keybindings so backward and forward subtitle seeks keep playback paused when invoked from a paused state.

View File

@@ -969,6 +969,7 @@ To refresh roughly once per day, set:
| `disabled` | No field grouping; duplicate cards are left as-is | | `disabled` | No field grouping; duplicate cards are left as-is |
`deleteDuplicateInAuto` controls whether `auto` mode deletes the duplicate after merge (default: `true`). In `manual` mode, the popup asks each time whether to delete the duplicate. `deleteDuplicateInAuto` controls whether `auto` mode deletes the duplicate after merge (default: `true`). In `manual` mode, the popup asks each time whether to delete the duplicate.
When the manual merge popup opens, SubMiner pauses playback and closes any open Yomitan popup first so the merge flow can take focus.
<video controls playsinline preload="metadata" poster="/assets/kiku-integration-poster.jpg" style="width: 100%; max-width: 960px;"> <video controls playsinline preload="metadata" poster="/assets/kiku-integration-poster.jpg" style="width: 100%; max-width: 960px;">
<source :src="'/assets/kiku-integration.webm'" type="video/webm" /> <source :src="'/assets/kiku-integration.webm'" type="video/webm" />

View File

@@ -51,9 +51,16 @@ function ensureSubmodulePresent() {
} }
function getSourceState() { function getSourceState() {
try {
const revision = readCommand('git', ['rev-parse', 'HEAD'], submoduleDir); const revision = readCommand('git', ['rev-parse', 'HEAD'], submoduleDir);
const dirty = readCommand('git', ['status', '--short', '--untracked-files=no'], submoduleDir); const dirty = readCommand('git', ['status', '--short', '--untracked-files=no'], submoduleDir);
return { revision, dirty }; return { revision, dirty };
} catch (error) {
if (process.env.SUBMINER_YOMITAN_ALLOW_MISSING_GIT === '1') {
return { revision: 'unknown', dirty: '' };
}
throw error;
}
} }
function isBuildCurrent(force) { function isBuildCurrent(force) {

View File

@@ -51,6 +51,7 @@ import { KnownWordCacheManager } from './anki-integration/known-word-cache';
import { PollingRunner } from './anki-integration/polling'; import { PollingRunner } from './anki-integration/polling';
import type { AnkiConnectProxyServer } from './anki-integration/anki-connect-proxy'; import type { AnkiConnectProxyServer } from './anki-integration/anki-connect-proxy';
import { findDuplicateNote as findDuplicateNoteForAnkiIntegration } from './anki-integration/duplicate'; import { findDuplicateNote as findDuplicateNoteForAnkiIntegration } from './anki-integration/duplicate';
import { findDuplicateNoteIds as findDuplicateNoteIdsForAnkiIntegration } from './anki-integration/duplicate';
import { CardCreationService } from './anki-integration/card-creation'; import { CardCreationService } from './anki-integration/card-creation';
import { FieldGroupingService } from './anki-integration/field-grouping'; import { FieldGroupingService } from './anki-integration/field-grouping';
import { FieldGroupingMergeCollaborator } from './anki-integration/field-grouping-merge'; import { FieldGroupingMergeCollaborator } from './anki-integration/field-grouping-merge';
@@ -148,6 +149,7 @@ export class AnkiIntegration {
private aiConfig: AiConfig; private aiConfig: AiConfig;
private recordCardsMinedCallback: ((count: number, noteIds?: number[]) => void) | null = null; private recordCardsMinedCallback: ((count: number, noteIds?: number[]) => void) | null = null;
private noteIdRedirects = new Map<number, number>(); private noteIdRedirects = new Map<number, number>();
private trackedDuplicateNoteIds = new Map<number, number[]>();
constructor( constructor(
config: AnkiConnectConfig, config: AnkiConnectConfig,
@@ -264,6 +266,9 @@ export class AnkiIntegration {
recordCardsAdded: (count, noteIds) => { recordCardsAdded: (count, noteIds) => {
this.recordCardsMinedSafely(count, noteIds, 'proxy'); this.recordCardsMinedSafely(count, noteIds, 'proxy');
}, },
trackAddedDuplicateNoteIds: (noteId, duplicateNoteIds) => {
this.trackDuplicateNoteIdsForNote(noteId, duplicateNoteIds);
},
getDeck: () => this.config.deck, getDeck: () => this.config.deck,
findNotes: async (query, options) => findNotes: async (query, options) =>
(await this.client.findNotes(query, options)) as number[], (await this.client.findNotes(query, options)) as number[],
@@ -361,6 +366,10 @@ export class AnkiIntegration {
trackLastAddedNoteId: (noteId) => { trackLastAddedNoteId: (noteId) => {
this.previousNoteIds.add(noteId); this.previousNoteIds.add(noteId);
}, },
trackLastAddedDuplicateNoteIds: (noteId, duplicateNoteIds) => {
this.trackedDuplicateNoteIds.set(noteId, [...duplicateNoteIds]);
},
findDuplicateNoteIds: (expression, noteInfo) => this.findDuplicateNoteIds(expression, noteInfo),
recordCardsMinedCallback: (count, noteIds) => { recordCardsMinedCallback: (count, noteIds) => {
this.recordCardsMinedSafely(count, noteIds, 'card creation'); this.recordCardsMinedSafely(count, noteIds, 'card creation');
}, },
@@ -382,6 +391,10 @@ export class AnkiIntegration {
extractFields: (fields) => this.extractFields(fields), extractFields: (fields) => this.extractFields(fields),
findDuplicateNote: (expression, noteId, noteInfo) => findDuplicateNote: (expression, noteId, noteInfo) =>
this.findDuplicateNote(expression, noteId, noteInfo), this.findDuplicateNote(expression, noteId, noteInfo),
getTrackedDuplicateNoteIds: (noteId) =>
this.trackedDuplicateNoteIds.has(noteId)
? [...(this.trackedDuplicateNoteIds.get(noteId) ?? [])]
: null,
hasAllConfiguredFields: (noteInfo, configuredFieldNames) => hasAllConfiguredFields: (noteInfo, configuredFieldNames) =>
this.hasAllConfiguredFields(noteInfo, configuredFieldNames), this.hasAllConfiguredFields(noteInfo, configuredFieldNames),
processNewCard: (noteId, options) => this.processNewCard(noteId, options), processNewCard: (noteId, options) => this.processNewCard(noteId, options),
@@ -1042,6 +1055,10 @@ export class AnkiIntegration {
); );
} }
/**
 * Remember which existing notes were exact duplicates of a freshly created note.
 * Stores a defensive copy so later mutation of the caller's array cannot
 * change the tracked context.
 *
 * @param noteId - ID of the note that was just created.
 * @param duplicateNoteIds - Note IDs detected as duplicates before the add.
 */
trackDuplicateNoteIdsForNote(noteId: number, duplicateNoteIds: number[]): void {
  const snapshot = duplicateNoteIds.slice();
  this.trackedDuplicateNoteIds.set(noteId, snapshot);
}
private async findDuplicateNote( private async findDuplicateNote(
expression: string, expression: string,
excludeNoteId: number, excludeNoteId: number,
@@ -1065,6 +1082,28 @@ export class AnkiIntegration {
}); });
} }
/**
 * Find note IDs that are exact duplicates of the given expression, delegating
 * to the shared duplicate finder used by the other mining paths.
 *
 * @param expression - Word/expression value to match against existing notes.
 * @param noteInfo - Note info whose field names are resolved per-model.
 * @returns Note IDs of exact duplicates (empty array when none are found).
 */
private async findDuplicateNoteIds(
  expression: string,
  noteInfo: NoteInfo,
): Promise<number[]> {
  // -1 presumably means "exclude no note" since this runs before the new
  // note exists — TODO confirm against the duplicate finder's contract.
  return findDuplicateNoteIdsForAnkiIntegration(expression, -1, noteInfo, {
    findNotes: async (query, options) => (await this.client.findNotes(query, options)) as unknown,
    notesInfo: async (noteIds) => (await this.client.notesInfo(noteIds)) as unknown,
    getDeck: () => this.config.deck,
    getWordFieldCandidates: () => this.getConfiguredWordFieldCandidates(),
    resolveFieldName: (info, preferredName) => this.resolveNoteFieldName(info, preferredName),
    logInfo: (message) => {
      log.info(message);
    },
    logDebug: (message) => {
      log.debug(message);
    },
    logWarn: (message, error) => {
      // Fix: the previous unchecked `(error as Error).message` cast threw a
      // TypeError when `error` was null/undefined and logged `undefined` for
      // non-Error values. Narrow at runtime instead of asserting.
      log.warn(message, error instanceof Error ? error.message : String(error ?? ''));
    },
  });
}
private getPreferredSentenceAudioFieldName(): string { private getPreferredSentenceAudioFieldName(): string {
const sentenceCardConfig = this.getEffectiveSentenceCardConfig(); const sentenceCardConfig = this.getEffectiveSentenceCardConfig();
return sentenceCardConfig.audioField || 'SentenceAudio'; return sentenceCardConfig.audioField || 'SentenceAudio';

View File

@@ -324,6 +324,123 @@ test('proxy fallback-enqueues latest note for addNote responses without note IDs
assert.deepEqual(recordedCards, [1]); assert.deepEqual(recordedCards, [1]);
}); });
// Regression test: duplicate note IDs attached to an addNote request as
// private SubMiner metadata (`subminerDuplicateNoteIds`) must be sanitized
// (positive integers only), deduplicated, sorted ascending, and handed to
// `trackAddedDuplicateNoteIds` keyed by the created note ID from the response.
test('proxy tracks duplicate note ids from addNote request metadata before enrichment', async () => {
const processed: number[] = [];
const tracked: Array<{ noteId: number; duplicateNoteIds: number[] }> = [];
const proxy = new AnkiConnectProxyServer({
shouldAutoUpdateNewCards: () => true,
processNewCard: async (noteId) => {
processed.push(noteId);
},
trackAddedDuplicateNoteIds: (noteId, duplicateNoteIds) => {
tracked.push({ noteId, duplicateNoteIds });
},
logInfo: () => undefined,
logWarn: () => undefined,
logError: () => undefined,
});
// Reach into the private enqueue hook directly rather than running a full
// HTTP round trip; the cast exposes only the method under test.
(
proxy as unknown as {
maybeEnqueueFromRequest: (request: Record<string, unknown>, responseBody: Buffer) => void;
}
).maybeEnqueueFromRequest(
{
action: 'addNote',
params: {
note: {},
// Deliberately dirty input: -1 must be dropped (non-positive), the
// repeated 11 must be deduplicated, and the result must come out sorted.
subminerDuplicateNoteIds: [11, -1, 40, 11, 25],
},
},
Buffer.from(JSON.stringify({ result: 42, error: null }), 'utf8'),
);
// Enrichment runs in the background, so wait for the processed note first.
await waitForCondition(() => processed.length === 1);
// 42 is the created note ID echoed from the fake addNote response above.
assert.deepEqual(tracked, [{ noteId: 42, duplicateNoteIds: [11, 25, 40] }]);
assert.deepEqual(processed, [42]);
});
// End-to-end proxy test: an addNote request carrying the private
// `subminerDuplicateNoteIds` metadata must (a) reach the upstream AnkiConnect
// stub WITHOUT that key, (b) still return the upstream response unchanged to
// the caller, and (c) report the sanitized duplicate IDs via
// `trackAddedDuplicateNoteIds`.
test('proxy strips SubMiner duplicate metadata before forwarding upstream addNote request', async () => {
let upstreamBody = '';
// Minimal fake AnkiConnect upstream that captures the forwarded body and
// always answers with a successful addNote result (note ID 42).
const upstream = http.createServer(async (req, res) => {
upstreamBody = await new Promise<string>((resolve) => {
const chunks: Buffer[] = [];
req.on('data', (chunk) => chunks.push(Buffer.from(chunk)));
req.on('end', () => resolve(Buffer.concat(chunks).toString('utf8')));
});
res.statusCode = 200;
res.setHeader('content-type', 'application/json');
res.end(JSON.stringify({ result: 42, error: null }));
});
// Port 0 lets the OS pick a free port; wait until it is actually bound.
upstream.listen(0, '127.0.0.1');
await once(upstream, 'listening');
const upstreamAddress = upstream.address();
assert.ok(upstreamAddress && typeof upstreamAddress === 'object');
const upstreamPort = upstreamAddress.port;
const tracked: Array<{ noteId: number; duplicateNoteIds: number[] }> = [];
const proxy = new AnkiConnectProxyServer({
shouldAutoUpdateNewCards: () => true,
processNewCard: async () => undefined,
trackAddedDuplicateNoteIds: (noteId, duplicateNoteIds) => {
tracked.push({ noteId, duplicateNoteIds });
},
logInfo: () => undefined,
logWarn: () => undefined,
logError: () => undefined,
});
try {
proxy.start({
host: '127.0.0.1',
port: 0,
upstreamUrl: `http://127.0.0.1:${upstreamPort}`,
});
// The listening port is only available on the private server instance;
// the cast exposes just enough to read it.
const proxyServer = (
proxy as unknown as {
server: http.Server | null;
}
).server;
assert.ok(proxyServer);
if (!proxyServer.listening) {
await once(proxyServer, 'listening');
}
const proxyAddress = proxyServer.address();
assert.ok(proxyAddress && typeof proxyAddress === 'object');
const proxyPort = proxyAddress.port;
const response = await fetch(`http://127.0.0.1:${proxyPort}`, {
method: 'POST',
headers: {
'content-type': 'application/json',
},
body: JSON.stringify({
action: 'addNote',
version: 6,
params: {
note: {
deckName: 'Mining',
modelName: 'Sentence',
fields: { Expression: '食べる' },
},
// Private metadata the proxy must strip before forwarding; note the
// IDs are intentionally unsorted to prove the tracked output sorts.
subminerDuplicateNoteIds: [18, 7],
},
}),
});
// The upstream response must pass through the proxy unmodified.
assert.equal(response.status, 200);
assert.deepEqual(await response.json(), { result: 42, error: null });
// Tracking happens asynchronously after the response is returned.
await waitForCondition(() => tracked.length === 1);
assert.equal(upstreamBody.includes('subminerDuplicateNoteIds'), false);
assert.deepEqual(tracked, [{ noteId: 42, duplicateNoteIds: [7, 18] }]);
} finally {
proxy.stop();
upstream.close();
await once(upstream, 'close');
}
});
test('proxy returns addNote response without waiting for background enrichment', async () => { test('proxy returns addNote response without waiting for background enrichment', async () => {
const processed: number[] = []; const processed: number[] = [];
let releaseProcessing: (() => void) | undefined; let releaseProcessing: (() => void) | undefined;

View File

@@ -16,6 +16,7 @@ export interface AnkiConnectProxyServerDeps {
shouldAutoUpdateNewCards: () => boolean; shouldAutoUpdateNewCards: () => boolean;
processNewCard: (noteId: number) => Promise<void>; processNewCard: (noteId: number) => Promise<void>;
recordCardsAdded?: (count: number, noteIds: number[]) => void; recordCardsAdded?: (count: number, noteIds: number[]) => void;
trackAddedDuplicateNoteIds?: (noteId: number, duplicateNoteIds: number[]) => void;
getDeck?: () => string | undefined; getDeck?: () => string | undefined;
findNotes?: ( findNotes?: (
query: string, query: string,
@@ -161,6 +162,7 @@ export class AnkiConnectProxyServer {
} }
try { try {
const forwardedBody = req.method === 'POST' ? this.getForwardRequestBody(rawBody, requestJson) : rawBody;
const targetUrl = new URL(req.url || '/', upstreamUrl).toString(); const targetUrl = new URL(req.url || '/', upstreamUrl).toString();
const contentType = const contentType =
typeof req.headers['content-type'] === 'string' typeof req.headers['content-type'] === 'string'
@@ -169,7 +171,7 @@ export class AnkiConnectProxyServer {
const upstreamResponse = await this.client.request<ArrayBuffer>({ const upstreamResponse = await this.client.request<ArrayBuffer>({
url: targetUrl, url: targetUrl,
method: req.method, method: req.method,
data: req.method === 'POST' ? rawBody : undefined, data: req.method === 'POST' ? forwardedBody : undefined,
headers: { headers: {
'content-type': contentType, 'content-type': contentType,
}, },
@@ -219,6 +221,8 @@ export class AnkiConnectProxyServer {
return; return;
} }
this.maybeTrackDuplicateNoteIds(requestJson, action, responseResult);
const noteIds = const noteIds =
action === 'multi' action === 'multi'
? this.collectMultiResultIds(requestJson, responseResult) ? this.collectMultiResultIds(requestJson, responseResult)
@@ -231,6 +235,77 @@ export class AnkiConnectProxyServer {
this.enqueueNotes(noteIds); this.enqueueNotes(noteIds);
} }
/**
 * Hand the duplicate note IDs carried on an addNote request to the host via
 * the optional `trackAddedDuplicateNoteIds` hook, keyed by the note ID the
 * upstream response reports. Does nothing for non-addNote actions, empty
 * duplicate lists, or responses that yield no usable note ID.
 *
 * @param requestJson - Parsed proxy request body.
 * @param action - The AnkiConnect action name from the request.
 * @param responseResult - The `result` value from the upstream response.
 */
private maybeTrackDuplicateNoteIds(
  requestJson: Record<string, unknown>,
  action: string,
  responseResult: unknown,
): void {
  if (action !== 'addNote') {
    return;
  }
  const duplicates = this.getRequestDuplicateNoteIds(requestJson);
  if (duplicates.length > 0) {
    const [createdNoteId] = this.collectSingleResultId(responseResult);
    if (createdNoteId) {
      this.deps.trackAddedDuplicateNoteIds?.(createdNoteId, duplicates);
    }
  }
}
private getForwardRequestBody(
  rawBody: Buffer,
  requestJson: Record<string, unknown> | null,
): Buffer {
  // Non-JSON payloads pass through unchanged.
  if (!requestJson) {
    return rawBody;
  }
  const sanitized = this.sanitizeRequestJson(requestJson);
  // Identity result means nothing was stripped — reuse the original bytes
  // so the upstream request is byte-identical.
  return sanitized === requestJson
    ? rawBody
    : Buffer.from(JSON.stringify(sanitized), 'utf8');
}
private sanitizeRequestJson(requestJson: Record<string, unknown>): Record<string, unknown> {
  // Strips the subminer-internal duplicate hint before forwarding an addNote
  // request upstream; returns the same object when nothing needs removing.
  const rawAction = requestJson.action;
  const action = typeof rawAction === 'string' ? rawAction : String(rawAction ?? '');
  if (action !== 'addNote') {
    return requestJson;
  }
  const params =
    requestJson.params && typeof requestJson.params === 'object'
      ? (requestJson.params as Record<string, unknown>)
      : null;
  // Only sanitize when the internal key is actually present.
  if (!params || !Object.prototype.hasOwnProperty.call(params, 'subminerDuplicateNoteIds')) {
    return requestJson;
  }
  // Rest-destructuring drops the internal key without mutating the input.
  const { subminerDuplicateNoteIds: _dropped, ...nextParams } = params;
  return {
    ...requestJson,
    params: nextParams,
  };
}
private getRequestDuplicateNoteIds(requestJson: Record<string, unknown>): number[] {
  // Extracts the subminer-internal duplicate-id hint from an addNote request,
  // keeping only positive integers, deduped and sorted ascending.
  const params =
    requestJson.params && typeof requestJson.params === 'object'
      ? (requestJson.params as Record<string, unknown>)
      : null;
  const rawNoteIds = Array.isArray(params?.subminerDuplicateNoteIds)
    ? params.subminerDuplicateNoteIds
    : [];
  const validNoteIds = rawNoteIds.filter(
    (entry): entry is number => typeof entry === 'number' && Number.isInteger(entry) && entry > 0,
  );
  const uniqueNoteIds = Array.from(new Set(validNoteIds));
  uniqueNoteIds.sort((a, b) => a - b);
  return uniqueNoteIds;
}
private requestIncludesAddAction(action: string, requestJson: Record<string, unknown>): boolean { private requestIncludesAddAction(action: string, requestJson: Record<string, unknown>): boolean {
if (action === 'addNote' || action === 'addNotes') { if (action === 'addNote' || action === 'addNotes') {
return true; return true;

View File

@@ -397,3 +397,178 @@ test('CardCreationService uses stream-open-filename for remote media generation'
assert.deepEqual(audioPaths, ['https://audio.example/videoplayback?mime=audio%2Fwebm']); assert.deepEqual(audioPaths, ['https://audio.example/videoplayback?mime=audio%2Fwebm']);
assert.deepEqual(imagePaths, ['https://video.example/videoplayback?mime=video%2Fmp4']); assert.deepEqual(imagePaths, ['https://video.example/videoplayback?mime=video%2Fmp4']);
}); });
// Verifies the generic sentence-card path: duplicates are looked up ONCE
// before the note is added, and the (deduped, sorted) result is handed to
// trackLastAddedDuplicateNoteIds keyed by the new note id — no post-add lookup.
test('CardCreationService tracks pre-add duplicate note ids for kiku sentence cards', async () => {
const trackedDuplicates: Array<{ noteId: number; duplicateNoteIds: number[] }> = [];
const duplicateLookupExpressions: string[] = [];
const service = new CardCreationService({
getConfig: () =>
({
deck: 'Mining',
fields: {
word: 'Expression',
sentence: 'Sentence',
audio: 'SentenceAudio',
},
media: {
generateAudio: false,
generateImage: false,
},
behavior: {},
ai: false,
}) as AnkiConnectConfig,
getAiConfig: () => ({}),
getTimingTracker: () => ({}) as never,
getMpvClient: () =>
({
currentVideoPath: '/video.mp4',
currentSubText: '字幕',
currentSubStart: 1,
currentSubEnd: 2,
currentTimePos: 1.5,
currentAudioStreamIndex: 0,
}) as never,
client: {
// addNote returns the new note id the tracker should be keyed by.
addNote: async () => 42,
addTags: async () => undefined,
notesInfo: async () => [],
updateNoteFields: async () => undefined,
storeMediaFile: async () => undefined,
findNotes: async () => [],
retrieveMediaFile: async () => '',
},
mediaGenerator: {
generateAudio: async () => null,
generateScreenshot: async () => null,
generateAnimatedImage: async () => null,
},
showOsdNotification: () => undefined,
showUpdateResult: () => undefined,
showStatusNotification: () => undefined,
showNotification: async () => undefined,
beginUpdateProgress: () => undefined,
endUpdateProgress: () => undefined,
withUpdateProgress: async (_message, action) => action(),
resolveConfiguredFieldName: () => null,
resolveNoteFieldName: () => null,
getAnimatedImageLeadInSeconds: async () => 0,
extractFields: () => ({}),
processSentence: (sentence) => sentence,
setCardTypeFields: () => undefined,
mergeFieldValue: (_existing, newValue) => newValue,
formatMiscInfoPattern: () => '',
// Kiku enabled with grouping on ('manual') is what triggers the pre-add lookup.
getEffectiveSentenceCardConfig: () => ({
model: 'Sentence',
sentenceField: 'Sentence',
audioField: 'SentenceAudio',
lapisEnabled: false,
kikuEnabled: true,
kikuFieldGrouping: 'manual',
kikuDeleteDuplicateInAuto: false,
}),
getFallbackDurationSeconds: () => 10,
appendKnownWordsFromNoteInfo: () => undefined,
isUpdateInProgress: () => false,
setUpdateInProgress: () => undefined,
trackLastAddedNoteId: () => undefined,
// Returns ids unsorted and with a duplicate entry on purpose; the service
// is expected to dedupe and sort before tracking.
findDuplicateNoteIds: async (expression) => {
duplicateLookupExpressions.push(expression);
return [18, 7, 30, 7];
},
trackLastAddedDuplicateNoteIds: (noteId, duplicateNoteIds) => {
trackedDuplicates.push({ noteId, duplicateNoteIds });
},
});
const created = await service.createSentenceCard('重複文', 0, 1);
assert.equal(created, true);
// Exactly one pre-add lookup, using the pending expression text.
assert.deepEqual(duplicateLookupExpressions, ['重複文']);
assert.deepEqual(trackedDuplicates, [{ noteId: 42, duplicateNoteIds: [7, 18, 30] }]);
});
// Counterpart to the tracking test: when the pre-add lookup finds no
// duplicates, trackLastAddedDuplicateNoteIds must not be invoked at all.
test('CardCreationService does not track duplicate ids when pre-add lookup returns none', async () => {
const trackedDuplicates: Array<{ noteId: number; duplicateNoteIds: number[] }> = [];
const service = new CardCreationService({
getConfig: () =>
({
deck: 'Mining',
fields: {
word: 'Expression',
sentence: 'Sentence',
audio: 'SentenceAudio',
},
media: {
generateAudio: false,
generateImage: false,
},
behavior: {},
ai: false,
}) as AnkiConnectConfig,
getAiConfig: () => ({}),
getTimingTracker: () => ({}) as never,
getMpvClient: () =>
({
currentVideoPath: '/video.mp4',
currentSubText: '字幕',
currentSubStart: 1,
currentSubEnd: 2,
currentTimePos: 1.5,
currentAudioStreamIndex: 0,
}) as never,
client: {
addNote: async () => 42,
addTags: async () => undefined,
notesInfo: async () => [],
updateNoteFields: async () => undefined,
storeMediaFile: async () => undefined,
findNotes: async () => [],
retrieveMediaFile: async () => '',
},
mediaGenerator: {
generateAudio: async () => null,
generateScreenshot: async () => null,
generateAnimatedImage: async () => null,
},
showOsdNotification: () => undefined,
showUpdateResult: () => undefined,
showStatusNotification: () => undefined,
showNotification: async () => undefined,
beginUpdateProgress: () => undefined,
endUpdateProgress: () => undefined,
withUpdateProgress: async (_message, action) => action(),
resolveConfiguredFieldName: () => null,
resolveNoteFieldName: () => null,
getAnimatedImageLeadInSeconds: async () => 0,
extractFields: () => ({}),
processSentence: (sentence) => sentence,
setCardTypeFields: () => undefined,
mergeFieldValue: (_existing, newValue) => newValue,
formatMiscInfoPattern: () => '',
getEffectiveSentenceCardConfig: () => ({
model: 'Sentence',
sentenceField: 'Sentence',
audioField: 'SentenceAudio',
lapisEnabled: false,
kikuEnabled: true,
kikuFieldGrouping: 'manual',
kikuDeleteDuplicateInAuto: false,
}),
getFallbackDurationSeconds: () => 10,
appendKnownWordsFromNoteInfo: () => undefined,
isUpdateInProgress: () => false,
setUpdateInProgress: () => undefined,
trackLastAddedNoteId: () => undefined,
// Lookup succeeds but yields nothing — the "no duplicates" case.
findDuplicateNoteIds: async () => [],
trackLastAddedDuplicateNoteIds: (noteId, duplicateNoteIds) => {
trackedDuplicates.push({ noteId, duplicateNoteIds });
},
});
const created = await service.createSentenceCard('重複なし', 0, 1);
assert.equal(created, true);
assert.deepEqual(trackedDuplicates, []);
});

View File

@@ -112,6 +112,11 @@ interface CardCreationDeps {
isUpdateInProgress: () => boolean; isUpdateInProgress: () => boolean;
setUpdateInProgress: (value: boolean) => void; setUpdateInProgress: (value: boolean) => void;
trackLastAddedNoteId?: (noteId: number) => void; trackLastAddedNoteId?: (noteId: number) => void;
trackLastAddedDuplicateNoteIds?: (noteId: number, duplicateNoteIds: number[]) => void;
findDuplicateNoteIds?: (
expression: string,
noteInfo: CardCreationNoteInfo,
) => Promise<number[]>;
recordCardsMinedCallback?: (count: number, noteIds?: number[]) => void; recordCardsMinedCallback?: (count: number, noteIds?: number[]) => void;
} }
@@ -548,6 +553,33 @@ export class CardCreationService {
fields[getConfiguredWordFieldName(this.deps.getConfig())] = sentence; fields[getConfiguredWordFieldName(this.deps.getConfig())] = sentence;
} }
const pendingNoteInfo = this.createPendingNoteInfo(fields);
const pendingNoteFields = Object.fromEntries(
Object.entries(fields).map(([name, value]) => [name.toLowerCase(), value]),
);
const pendingExpressionText = getPreferredWordValueFromExtractedFields(
pendingNoteFields,
this.deps.getConfig(),
).trim();
let duplicateNoteIds: number[] = [];
if (
sentenceCardConfig.kikuEnabled &&
sentenceCardConfig.kikuFieldGrouping !== 'disabled' &&
pendingExpressionText &&
this.deps.findDuplicateNoteIds
) {
try {
duplicateNoteIds = sortUniqueNoteIds(
await this.deps.findDuplicateNoteIds(pendingExpressionText, pendingNoteInfo),
);
} catch (error) {
log.warn(
'Failed to capture pre-add duplicate note ids:',
(error as Error).message,
);
}
}
const deck = this.deps.getConfig().deck || 'Default'; const deck = this.deps.getConfig().deck || 'Default';
let noteId: number; let noteId: number;
try { try {
@@ -570,6 +602,14 @@ export class CardCreationService {
log.warn('Failed to track last added note:', (error as Error).message); log.warn('Failed to track last added note:', (error as Error).message);
} }
if (duplicateNoteIds.length > 0) {
try {
this.deps.trackLastAddedDuplicateNoteIds?.(noteId, duplicateNoteIds);
} catch (error) {
log.warn('Failed to track duplicate note ids:', (error as Error).message);
}
}
try { try {
this.deps.recordCardsMinedCallback?.(1, [noteId]); this.deps.recordCardsMinedCallback?.(1, [noteId]);
} catch (error) { } catch (error) {
@@ -685,6 +725,15 @@ export class CardCreationService {
); );
} }
private createPendingNoteInfo(fields: Record<string, string>): CardCreationNoteInfo {
  // Builds a note-info shape for a note that has not been added yet.
  // noteId -1 is a sentinel for "pending"; duplicate lookup only reads fields.
  const wrappedFields: Record<string, { value: string }> = {};
  for (const [name, value] of Object.entries(fields)) {
    wrappedFields[name] = { value };
  }
  return { noteId: -1, fields: wrappedFields };
}
private async mediaGenerateAudio( private async mediaGenerateAudio(
videoPath: string, videoPath: string,
startTime: number, startTime: number,
@@ -764,3 +813,7 @@ export class CardCreationService {
return `image_${timestamp}.${ext}`; return `image_${timestamp}.${ext}`;
} }
} }
function sortUniqueNoteIds(noteIds: number[]): number[] {
  // Canonicalize a duplicate-id list: drop repeats, then order ascending
  // (numeric comparator — default sort would compare as strings).
  const unique = Array.from(new Set(noteIds));
  unique.sort((a, b) => a - b);
  return unique;
}

View File

@@ -1,6 +1,6 @@
import test from 'node:test'; import test from 'node:test';
import assert from 'node:assert/strict'; import assert from 'node:assert/strict';
import { findDuplicateNote, type NoteInfo } from './duplicate'; import { findDuplicateNote, findDuplicateNoteIds, type NoteInfo } from './duplicate';
function createFieldResolver(noteInfo: NoteInfo, preferredName: string): string | null { function createFieldResolver(noteInfo: NoteInfo, preferredName: string): string | null {
const names = Object.keys(noteInfo.fields); const names = Object.keys(noteInfo.fields);
@@ -267,3 +267,62 @@ test('findDuplicateNote does not disable retries on findNotes calls', async () =
assert.ok(seenOptions.length > 0); assert.ok(seenOptions.length > 0);
assert.ok(seenOptions.every((options) => options?.maxRetries !== 0)); assert.ok(seenOptions.every((options) => options?.maxRetries !== 0));
}); });
// 51 candidates force two 50-id chunks; because the very first chunk contains
// an exact match (note 200), notesInfo must be called exactly once.
test('findDuplicateNote stops after the first exact-match chunk', async () => {
const currentNote: NoteInfo = {
noteId: 100,
fields: {
Expression: { value: '貴様' },
},
};
let notesInfoCalls = 0;
const candidateIds = Array.from({ length: 51 }, (_, index) => 200 + index);
const duplicateId = await findDuplicateNote('貴様', 100, currentNote, {
findNotes: async () => candidateIds,
notesInfo: async (noteIds) => {
notesInfoCalls += 1;
// Only note 200 matches the expression; the rest are distinct words.
return noteIds.map((noteId) => ({
noteId,
fields: {
Expression: { value: noteId === 200 ? '貴様' : `別単語-${noteId}` },
},
}));
},
getDeck: () => 'Japanese::Mining',
resolveFieldName: (noteInfo, preferredName) => createFieldResolver(noteInfo, preferredName),
logWarn: () => {},
});
assert.equal(duplicateId, 200);
assert.equal(notesInfoCalls, 1);
});
// maxMatches === 0 is a short-circuit: no matches requested, so no
// notesInfo round-trip should happen even though a candidate exists.
test('findDuplicateNoteIds returns no matches when maxMatches is zero', async () => {
const currentNote: NoteInfo = {
noteId: 100,
fields: {
Expression: { value: '貴様' },
},
};
let notesInfoCalls = 0;
const duplicateIds = await findDuplicateNoteIds('貴様', 100, currentNote, {
findNotes: async () => [200],
notesInfo: async (noteIds) => {
notesInfoCalls += 1;
return noteIds.map((noteId) => ({
noteId,
fields: {
Expression: { value: '貴様' },
},
}));
},
getDeck: () => 'Japanese::Mining',
resolveFieldName: (noteInfo, preferredName) => createFieldResolver(noteInfo, preferredName),
logWarn: () => {},
}, 0);
assert.deepEqual(duplicateIds, []);
assert.equal(notesInfoCalls, 0);
});

View File

@@ -24,13 +24,30 @@ export async function findDuplicateNote(
noteInfo: NoteInfo, noteInfo: NoteInfo,
deps: DuplicateDetectionDeps, deps: DuplicateDetectionDeps,
): Promise<number | null> { ): Promise<number | null> {
const duplicateNoteIds = await findDuplicateNoteIds(
expression,
excludeNoteId,
noteInfo,
deps,
1,
);
return duplicateNoteIds[0] ?? null;
}
export async function findDuplicateNoteIds(
expression: string,
excludeNoteId: number,
noteInfo: NoteInfo,
deps: DuplicateDetectionDeps,
maxMatches?: number,
): Promise<number[]> {
const configuredWordFieldCandidates = deps.getWordFieldCandidates?.() ?? ['Expression', 'Word']; const configuredWordFieldCandidates = deps.getWordFieldCandidates?.() ?? ['Expression', 'Word'];
const sourceCandidates = getDuplicateSourceCandidates( const sourceCandidates = getDuplicateSourceCandidates(
noteInfo, noteInfo,
expression, expression,
configuredWordFieldCandidates, configuredWordFieldCandidates,
); );
if (sourceCandidates.length === 0) return null; if (sourceCandidates.length === 0) return [];
deps.logInfo?.( deps.logInfo?.(
`[duplicate] start expr="${expression}" sourceCandidates=${sourceCandidates `[duplicate] start expr="${expression}" sourceCandidates=${sourceCandidates
.map((entry) => `${entry.fieldName}:${entry.value}`) .map((entry) => `${entry.fieldName}:${entry.value}`)
@@ -83,42 +100,49 @@ export async function findDuplicateNote(
} }
} }
return await findFirstExactDuplicateNoteId( return await findExactDuplicateNoteIds(
noteIds, noteIds,
excludeNoteId, excludeNoteId,
sourceCandidates.map((candidate) => candidate.value), sourceCandidates.map((candidate) => candidate.value),
configuredWordFieldCandidates, configuredWordFieldCandidates,
deps, deps,
maxMatches,
); );
} catch (error) { } catch (error) {
deps.logWarn('Duplicate search failed:', error); deps.logWarn('Duplicate search failed:', error);
return null; return [];
} }
} }
function findFirstExactDuplicateNoteId( function findExactDuplicateNoteIds(
candidateNoteIds: Iterable<number>, candidateNoteIds: Iterable<number>,
excludeNoteId: number, excludeNoteId: number,
sourceValues: string[], sourceValues: string[],
candidateFieldNames: string[], candidateFieldNames: string[],
deps: DuplicateDetectionDeps, deps: DuplicateDetectionDeps,
): Promise<number | null> { maxMatches?: number,
): Promise<number[]> {
if (maxMatches !== undefined && maxMatches <= 0) {
return Promise.resolve([]);
}
const candidates = Array.from(candidateNoteIds).filter((id) => id !== excludeNoteId); const candidates = Array.from(candidateNoteIds).filter((id) => id !== excludeNoteId);
deps.logDebug?.(`[duplicate] candidateIds=${candidates.length} exclude=${excludeNoteId}`); deps.logDebug?.(`[duplicate] candidateIds=${candidates.length} exclude=${excludeNoteId}`);
if (candidates.length === 0) { if (candidates.length === 0) {
deps.logInfo?.('[duplicate] no candidates after query + exclude'); deps.logInfo?.('[duplicate] no candidates after query + exclude');
return Promise.resolve(null); return Promise.resolve([]);
} }
const normalizedValues = new Set( const normalizedValues = new Set(
sourceValues.map((value) => normalizeDuplicateValue(value)).filter((value) => value.length > 0), sourceValues.map((value) => normalizeDuplicateValue(value)).filter((value) => value.length > 0),
); );
if (normalizedValues.size === 0) { if (normalizedValues.size === 0) {
return Promise.resolve(null); return Promise.resolve([]);
} }
const chunkSize = 50; const chunkSize = 50;
return (async () => { return (async () => {
const matches: number[] = [];
for (let i = 0; i < candidates.length; i += chunkSize) { for (let i = 0; i < candidates.length; i += chunkSize) {
const chunk = candidates.slice(i, i + chunkSize); const chunk = candidates.slice(i, i + chunkSize);
const notesInfoResult = (await deps.notesInfo(chunk)) as unknown[]; const notesInfoResult = (await deps.notesInfo(chunk)) as unknown[];
@@ -133,13 +157,19 @@ function findFirstExactDuplicateNoteId(
`[duplicate] exact-match noteId=${noteInfo.noteId} field=${resolvedField}`, `[duplicate] exact-match noteId=${noteInfo.noteId} field=${resolvedField}`,
); );
deps.logInfo?.(`[duplicate] matched noteId=${noteInfo.noteId} field=${resolvedField}`); deps.logInfo?.(`[duplicate] matched noteId=${noteInfo.noteId} field=${resolvedField}`);
return noteInfo.noteId; matches.push(noteInfo.noteId);
if (maxMatches !== undefined && matches.length >= maxMatches) {
return matches;
}
break;
} }
} }
} }
} }
if (matches.length === 0) {
deps.logInfo?.('[duplicate] no exact match in candidate notes'); deps.logInfo?.('[duplicate] no exact match in candidate notes');
return null; }
return matches;
})(); })();
} }

View File

@@ -16,6 +16,7 @@ function createHarness(
noteIds?: number[]; noteIds?: number[];
notesInfo?: NoteInfo[][]; notesInfo?: NoteInfo[][];
duplicateNoteId?: number | null; duplicateNoteId?: number | null;
trackedDuplicateNoteIds?: number[] | null;
hasAllConfiguredFields?: boolean; hasAllConfiguredFields?: boolean;
manualHandled?: boolean; manualHandled?: boolean;
expression?: string | null; expression?: string | null;
@@ -74,6 +75,7 @@ function createHarness(
duplicateRequests.push({ expression, excludeNoteId }); duplicateRequests.push({ expression, excludeNoteId });
return options.duplicateNoteId ?? 99; return options.duplicateNoteId ?? 99;
}, },
getTrackedDuplicateNoteIds: () => options.trackedDuplicateNoteIds ?? null,
hasAllConfiguredFields: () => options.hasAllConfiguredFields ?? true, hasAllConfiguredFields: () => options.hasAllConfiguredFields ?? true,
processNewCard: async (noteId, processOptions) => { processNewCard: async (noteId, processOptions) => {
processCalls.push({ noteId, options: processOptions }); processCalls.push({ noteId, options: processOptions });
@@ -223,6 +225,46 @@ test('triggerFieldGroupingForLastAddedCard finds the newest note and hands off t
]); ]);
}); });
// When tracked duplicate ids exist for the note, field grouping must use
// them (picking the highest id, here 40) and skip the duplicate lookup
// entirely — duplicateRequests stays empty despite duplicateNoteId: 99.
test('triggerFieldGroupingForLastAddedCard prefers tracked duplicate note ids before duplicate lookup', async () => {
const harness = createHarness({
noteIds: [7],
notesInfo: [
[
{
noteId: 7,
fields: {
Expression: { value: 'word-7' },
Sentence: { value: 'line-7' },
},
},
],
[
{
noteId: 7,
fields: {
Expression: { value: 'word-7' },
Sentence: { value: 'line-7' },
},
},
],
],
trackedDuplicateNoteIds: [12, 40, 25],
duplicateNoteId: 99,
hasAllConfiguredFields: true,
});
await harness.service.triggerFieldGroupingForLastAddedCard();
// No fallback lookup was performed.
assert.deepEqual(harness.duplicateRequests, []);
assert.deepEqual(harness.autoCalls, [
{
originalNoteId: 40,
newNoteId: 7,
expression: 'word-7',
},
]);
});
test('triggerFieldGroupingForLastAddedCard refreshes the card when configured fields are missing', async () => { test('triggerFieldGroupingForLastAddedCard refreshes the card when configured fields are missing', async () => {
const processCalls: Array<{ noteId: number; options?: { skipKikuFieldGrouping?: boolean } }> = []; const processCalls: Array<{ noteId: number; options?: { skipKikuFieldGrouping?: boolean } }> = [];
const harness = createHarness({ const harness = createHarness({

View File

@@ -41,6 +41,7 @@ interface FieldGroupingDeps {
excludeNoteId: number, excludeNoteId: number,
noteInfo: FieldGroupingNoteInfo, noteInfo: FieldGroupingNoteInfo,
) => Promise<number | null>; ) => Promise<number | null>;
getTrackedDuplicateNoteIds?: (noteId: number) => number[] | null;
hasAllConfiguredFields: ( hasAllConfiguredFields: (
noteInfo: FieldGroupingNoteInfo, noteInfo: FieldGroupingNoteInfo,
configuredFieldNames: (string | undefined)[], configuredFieldNames: (string | undefined)[],
@@ -117,11 +118,11 @@ export class FieldGroupingService {
return; return;
} }
const duplicateNoteId = await this.deps.findDuplicateNote( const trackedDuplicateNoteIds = this.deps.getTrackedDuplicateNoteIds?.(noteId) ?? null;
expressionText, const duplicateNoteId =
noteId, trackedDuplicateNoteIds !== null
noteInfoBeforeUpdate, ? pickMostRecentDuplicateNoteId(trackedDuplicateNoteIds, noteId)
); : await this.deps.findDuplicateNote(expressionText, noteId, noteInfoBeforeUpdate);
if (duplicateNoteId === null) { if (duplicateNoteId === null) {
this.deps.showOsdNotification('No duplicate card found'); this.deps.showOsdNotification('No duplicate card found');
return; return;
@@ -243,3 +244,17 @@ export class FieldGroupingService {
} }
} }
} }
function pickMostRecentDuplicateNoteId(
  duplicateNoteIds: number[],
  excludeNoteId: number,
): number | null {
  // Larger note id is treated as "more recent" here; the newly added note
  // itself is excluded so grouping never pairs a note with itself.
  const candidates = duplicateNoteIds.filter((noteId) => noteId !== excludeNoteId);
  if (candidates.length === 0) {
    return null;
  }
  return Math.max(...candidates);
}

View File

@@ -5,6 +5,7 @@ import * as path from 'path';
import test from 'node:test'; import test from 'node:test';
import * as vm from 'node:vm'; import * as vm from 'node:vm';
import { import {
addYomitanNoteViaSearch,
getYomitanDictionaryInfo, getYomitanDictionaryInfo,
importYomitanDictionaryFromZip, importYomitanDictionaryFromZip,
deleteYomitanDictionaryByTitle, deleteYomitanDictionaryByTitle,
@@ -1373,3 +1374,48 @@ test('deleteYomitanDictionaryByTitle uses settings automation bridge instead of
false, false,
); );
}); });
// Object envelope from the bridge: note id and duplicate ids are passed
// through as-is (dedup/ordering is the caller's concern).
test('addYomitanNoteViaSearch returns note and duplicate ids from the bridge payload', async () => {
const deps = createDeps(async (_script) => ({
noteId: 42,
duplicateNoteIds: [18, 7, 18],
}));
const result = await addYomitanNoteViaSearch('食べる', deps, {
error: () => undefined,
});
assert.deepEqual(result, {
noteId: 42,
duplicateNoteIds: [18, 7, 18],
});
});

// Legacy numeric shortcut: a non-integer (NaN) note id is rejected and
// normalized to the empty result envelope.
test('addYomitanNoteViaSearch rejects invalid numeric note ids from the bridge shortcut', async () => {
const deps = createDeps(async () => NaN);
const result = await addYomitanNoteViaSearch('食べる', deps, {
error: () => undefined,
});
assert.deepEqual(result, {
noteId: null,
duplicateNoteIds: [],
});
});

// Mixed payload: a negative note id is dropped, and only positive integer
// duplicate ids survive filtering (0 and 7.5 are discarded).
test('addYomitanNoteViaSearch sanitizes invalid payload note ids while keeping valid duplicate ids', async () => {
const deps = createDeps(async (_script) => ({
noteId: -1,
duplicateNoteIds: [18, 0, 7.5, 7],
}));
const result = await addYomitanNoteViaSearch('食べる', deps, {
error: () => undefined,
});
assert.deepEqual(result, {
noteId: null,
duplicateNoteIds: [18, 7],
});
});

View File

@@ -63,6 +63,11 @@ interface YomitanProfileMetadata {
dictionaryFrequencyModeByName: Partial<Record<string, YomitanFrequencyMode>>; dictionaryFrequencyModeByName: Partial<Record<string, YomitanFrequencyMode>>;
} }
export interface YomitanAddNoteResult {
noteId: number | null;
duplicateNoteIds: number[];
}
const DEFAULT_YOMITAN_SCAN_LENGTH = 40; const DEFAULT_YOMITAN_SCAN_LENGTH = 40;
const yomitanProfileMetadataByWindow = new WeakMap<BrowserWindow, YomitanProfileMetadata>(); const yomitanProfileMetadataByWindow = new WeakMap<BrowserWindow, YomitanProfileMetadata>();
const yomitanFrequencyCacheByWindow = new WeakMap< const yomitanFrequencyCacheByWindow = new WeakMap<
@@ -1984,11 +1989,11 @@ export async function addYomitanNoteViaSearch(
word: string, word: string,
deps: YomitanParserRuntimeDeps, deps: YomitanParserRuntimeDeps,
logger: LoggerLike, logger: LoggerLike,
): Promise<number | null> { ): Promise<YomitanAddNoteResult> {
const isReady = await ensureYomitanParserWindow(deps, logger); const isReady = await ensureYomitanParserWindow(deps, logger);
const parserWindow = deps.getYomitanParserWindow(); const parserWindow = deps.getYomitanParserWindow();
if (!isReady || !parserWindow || parserWindow.isDestroyed()) { if (!isReady || !parserWindow || parserWindow.isDestroyed()) {
return null; return { noteId: null, duplicateNoteIds: [] };
} }
const escapedWord = JSON.stringify(word); const escapedWord = JSON.stringify(word);
@@ -2003,10 +2008,35 @@ export async function addYomitanNoteViaSearch(
`; `;
try { try {
const noteId = await parserWindow.webContents.executeJavaScript(script, true); const result = await parserWindow.webContents.executeJavaScript(script, true);
return typeof noteId === 'number' ? noteId : null; if (typeof result === 'number') {
return {
noteId: Number.isInteger(result) && result > 0 ? result : null,
duplicateNoteIds: [],
};
}
if (result && typeof result === 'object' && !Array.isArray(result)) {
const envelope = result as {
noteId?: unknown;
duplicateNoteIds?: unknown;
};
return {
noteId:
typeof envelope.noteId === 'number' &&
Number.isInteger(envelope.noteId) &&
envelope.noteId > 0
? envelope.noteId
: null,
duplicateNoteIds: Array.isArray(envelope.duplicateNoteIds)
? envelope.duplicateNoteIds.filter(
(entry): entry is number => typeof entry === 'number' && Number.isInteger(entry) && entry > 0,
)
: [],
};
}
return { noteId: null, duplicateNoteIds: [] };
} catch (err) { } catch (err) {
logger.error('Yomitan addNoteFromWord failed:', (err as Error).message); logger.error('Yomitan addNoteFromWord failed:', (err as Error).message);
return null; return { noteId: null, duplicateNoteIds: [] };
} }
} }

View File

@@ -524,6 +524,56 @@ test('popup-visible mpv keybindings still fire for bound keys', async () => {
} }
}); });
// A configured sub-seek keybinding while paused must re-apply pause right
// after the seek (mpv sub-seek can implicitly resume playback).
test('paused configured subtitle-jump keybinding re-applies pause after backward seek', async () => {
const { handlers, testGlobals } = createKeyboardHandlerHarness();
try {
await handlers.setupMpvInputForwarding();
handlers.updateKeybindings([
{
key: 'Shift+KeyH',
command: ['sub-seek', -1],
},
] as never);
testGlobals.setPlaybackPausedResponse(true);
testGlobals.dispatchKeydown({ key: 'H', code: 'KeyH', shiftKey: true });
await wait(0);
assert.deepEqual(testGlobals.mpvCommands.slice(-2), [
['sub-seek', -1],
['set_property', 'pause', 'yes'],
]);
} finally {
testGlobals.restore();
}
});

// Unknown pause state (null) is handled conservatively: pause is re-applied
// as if playback had been paused.
test('configured subtitle-jump keybinding preserves pause when pause state is unknown', async () => {
const { handlers, testGlobals } = createKeyboardHandlerHarness();
try {
await handlers.setupMpvInputForwarding();
handlers.updateKeybindings([
{
key: 'Shift+KeyH',
command: ['sub-seek', -1],
},
] as never);
testGlobals.setPlaybackPausedResponse(null);
testGlobals.dispatchKeydown({ key: 'H', code: 'KeyH', shiftKey: true });
await wait(0);
assert.deepEqual(testGlobals.mpvCommands.slice(-2), [
['sub-seek', -1],
['set_property', 'pause', 'yes'],
]);
} finally {
testGlobals.restore();
}
});
test('visible-layer y-t dispatches mpv plugin toggle while overlay owns focus', async () => { test('visible-layer y-t dispatches mpv plugin toggle while overlay owns focus', async () => {
const { handlers, testGlobals } = createKeyboardHandlerHarness(); const { handlers, testGlobals } = createKeyboardHandlerHarness();
@@ -1159,6 +1209,56 @@ test('keyboard mode: edge jump while paused re-applies paused state after subtit
} }
}); });
// In keyboard-driven mode, moving left past the first token triggers a
// subtitle seek; while paused, pause must be re-applied after the seek.
test('keyboard mode: left edge jump while paused re-applies paused state after subtitle seek', async () => {
const { ctx, handlers, testGlobals, setWordCount } = createKeyboardHandlerHarness();
try {
await handlers.setupMpvInputForwarding();
handlers.handleKeyboardModeToggleRequested();
setWordCount(2);
ctx.state.keyboardSelectedWordIndex = 0;
handlers.syncKeyboardTokenSelection();
testGlobals.setPlaybackPausedResponse(true);
testGlobals.dispatchKeydown({ key: 'ArrowLeft', code: 'ArrowLeft' });
await wait(0);
assert.deepEqual(testGlobals.mpvCommands.slice(-2), [
['sub-seek', -1],
['set_property', 'pause', 'yes'],
]);
} finally {
ctx.state.keyboardDrivenModeEnabled = false;
testGlobals.restore();
}
});

// Same behavior for the vim-style 'h' key as for ArrowLeft.
test('keyboard mode: h edge jump while paused re-applies paused state after subtitle seek', async () => {
const { ctx, handlers, testGlobals, setWordCount } = createKeyboardHandlerHarness();
try {
await handlers.setupMpvInputForwarding();
handlers.handleKeyboardModeToggleRequested();
setWordCount(2);
ctx.state.keyboardSelectedWordIndex = 0;
handlers.syncKeyboardTokenSelection();
testGlobals.setPlaybackPausedResponse(true);
testGlobals.dispatchKeydown({ key: 'h', code: 'KeyH' });
await wait(0);
assert.deepEqual(testGlobals.mpvCommands.slice(-2), [
['sub-seek', -1],
['set_property', 'pause', 'yes'],
]);
} finally {
ctx.state.keyboardDrivenModeEnabled = false;
testGlobals.restore();
}
});
test('keyboard mode: edge jump with unknown pause state re-applies pause conservatively', async () => { test('keyboard mode: edge jump with unknown pause state re-applies pause conservatively', async () => {
const { ctx, handlers, testGlobals, setWordCount } = createKeyboardHandlerHarness(); const { ctx, handlers, testGlobals, setWordCount } = createKeyboardHandlerHarness();

View File

@@ -358,6 +358,33 @@ export function createKeyboardHandlers(
}); });
} }
// Type guard: true when the mpv command is a subtitle seek, e.g. ['sub-seek', -1].
// NOTE(review): the [string, number] predicate assumes a two-element command;
// extra trailing args would still pass the runtime check.
function isSubtitleSeekCommand(command: (string | number)[] | undefined): command is [string, number] {
  if (!Array.isArray(command)) {
    return false;
  }
  const [name, amount] = command;
  return name === 'sub-seek' && typeof amount === 'number';
}
// Sends a configured keybinding command to mpv. Subtitle seeks get special
// handling: the pause state is probed first and pause is re-applied after the
// seek unless playback was definitely running (unknown state is treated as
// paused, conservatively).
function dispatchConfiguredMpvCommand(command: (string | number)[]): void {
  const send = (cmd: (string | number)[]): void => {
    window.electronAPI.sendMpvCommand(cmd);
  };
  if (!isSubtitleSeekCommand(command)) {
    send(command);
    return;
  }
  void options
    .getPlaybackPaused()
    .then((paused) => {
      send(command);
      if (paused !== false) {
        send(['set_property', 'pause', 'yes']);
      }
    })
    .catch(() => {
      // Pause probe failed — still dispatch the seek itself.
      send(command);
    });
}
type ScanModifierState = { type ScanModifierState = {
shiftKey?: boolean; shiftKey?: boolean;
ctrlKey?: boolean; ctrlKey?: boolean;
@@ -954,7 +981,7 @@ export function createKeyboardHandlers(
if (command) { if (command) {
e.preventDefault(); e.preventDefault();
window.electronAPI.sendMpvCommand(command); dispatchConfiguredMpvCommand(command);
} }
}); });

View File

@@ -0,0 +1,35 @@
import assert from 'node:assert/strict';
import test from 'node:test';
import { prepareForKikuFieldGroupingOpen } from './kiku-open';
// Ordering contract: the lookup popup is closed before playback is paused.
test('prepareForKikuFieldGroupingOpen closes lookup popup before pausing playback', () => {
const calls: string[] = [];
prepareForKikuFieldGroupingOpen({
closeLookupWindow: () => {
calls.push('close');
return true;
},
pausePlayback: () => {
calls.push('pause');
},
});
assert.deepEqual(calls, ['close', 'pause']);
});

// The close result is ignored: pause still happens when no popup was open.
test('prepareForKikuFieldGroupingOpen still pauses playback when no popup is open', () => {
const calls: string[] = [];
prepareForKikuFieldGroupingOpen({
closeLookupWindow: () => {
calls.push('close');
return false;
},
pausePlayback: () => {
calls.push('pause');
},
});
assert.deepEqual(calls, ['close', 'pause']);
});

View File

@@ -0,0 +1,7 @@
// Prepares the overlay for the Kiku field-grouping modal: dismiss any open
// lookup popup first, then pause playback. The boolean returned by
// closeLookupWindow (whether a popup was actually open) is intentionally
// ignored — pause happens either way.
export function prepareForKikuFieldGroupingOpen(options: {
  closeLookupWindow: () => boolean;
  pausePlayback: () => void;
}): void {
  const { closeLookupWindow, pausePlayback } = options;
  closeLookupWindow();
  pausePlayback();
}

View File

@@ -33,6 +33,7 @@ import { createControllerDebugModal } from './modals/controller-debug.js';
import { createControllerSelectModal } from './modals/controller-select.js'; import { createControllerSelectModal } from './modals/controller-select.js';
import { createJimakuModal } from './modals/jimaku.js'; import { createJimakuModal } from './modals/jimaku.js';
import { createKikuModal } from './modals/kiku.js'; import { createKikuModal } from './modals/kiku.js';
import { prepareForKikuFieldGroupingOpen } from './kiku-open.js';
import { createPlaylistBrowserModal } from './modals/playlist-browser.js'; import { createPlaylistBrowserModal } from './modals/playlist-browser.js';
import { createSessionHelpModal } from './modals/session-help.js'; import { createSessionHelpModal } from './modals/session-help.js';
import { createSubtitleSidebarModal } from './modals/subtitle-sidebar.js'; import { createSubtitleSidebarModal } from './modals/subtitle-sidebar.js';
@@ -470,6 +471,12 @@ function registerModalOpenHandlers(): void {
window.electronAPI.onKikuFieldGroupingRequest( window.electronAPI.onKikuFieldGroupingRequest(
(data: { original: KikuDuplicateCardInfo; duplicate: KikuDuplicateCardInfo }) => { (data: { original: KikuDuplicateCardInfo; duplicate: KikuDuplicateCardInfo }) => {
runGuarded('kiku:field-grouping-open', () => { runGuarded('kiku:field-grouping-open', () => {
prepareForKikuFieldGroupingOpen({
closeLookupWindow: () => keyboardHandlers.closeLookupWindow(),
pausePlayback: () => {
window.electronAPI.sendMpvCommand(['set_property', 'pause', 'yes']);
},
});
kikuModal.openKikuFieldGroupingModal(data); kikuModal.openKikuFieldGroupingModal(data);
window.electronAPI.notifyOverlayModalOpened('kiku'); window.electronAPI.notifyOverlayModalOpened('kiku');
}); });

View File

@@ -147,7 +147,7 @@ async function main(): Promise<void> {
{ forceOverride: true }, { forceOverride: true },
); );
const noteId = await addYomitanNoteViaSearch( const addResult = await addYomitanNoteViaSearch(
word!, word!,
{ {
getYomitanExt: () => yomitanExt, getYomitanExt: () => yomitanExt,
@@ -168,6 +168,7 @@ async function main(): Promise<void> {
logger, logger,
); );
const noteId = addResult.noteId;
if (typeof noteId !== 'number') { if (typeof noteId !== 'number') {
throw new Error('Yomitan failed to create note.'); throw new Error('Yomitan failed to create note.');
} }