From ceb064b80478c9dd5200a08a2ab6e1839f3a4b86 Mon Sep 17 00:00:00 2001
From: sudacode
Date: Sat, 28 Feb 2026 04:15:58 -0800
Subject: [PATCH] Add fallback enqueue when addNote returns no IDs

---
 .../anki-connect-proxy.test.ts             | 30 +++++++++
 src/anki-integration/anki-connect-proxy.ts |  3 +-
 .../services/tokenizer/annotation-stage.ts |  2 +
 src/token-merger.test.ts                   | 65 +++++++++++++++++++
 4 files changed, 99 insertions(+), 1 deletion(-)
 create mode 100644 src/token-merger.test.ts

diff --git a/src/anki-integration/anki-connect-proxy.test.ts b/src/anki-integration/anki-connect-proxy.test.ts
index 9a8de47..7feb398 100644
--- a/src/anki-integration/anki-connect-proxy.test.ts
+++ b/src/anki-integration/anki-connect-proxy.test.ts
@@ -230,6 +230,36 @@ test('proxy ignores addNote when upstream response reports error', async () => {
   assert.deepEqual(processed, []);
 });
 
+test('proxy falls back to latest added note when addNote returns no IDs', async () => {
+  const processed: number[] = [];
+  const findNotesQueries: string[] = [];
+  const proxy = new AnkiConnectProxyServer({
+    shouldAutoUpdateNewCards: () => true,
+    processNewCard: async (noteId) => {
+      processed.push(noteId);
+    },
+    getDeck: () => 'My "Japanese" Deck',
+    findNotes: async (query) => {
+      findNotesQueries.push(query);
+      return [500, 501];
+    },
+    logInfo: () => undefined,
+    logWarn: () => undefined,
+    logError: () => undefined,
+  });
+
+  (proxy as unknown as {
+    maybeEnqueueFromRequest: (request: Record<string, unknown>, responseBody: Buffer) => void;
+  }).maybeEnqueueFromRequest(
+    { action: 'addNote' },
+    Buffer.from(JSON.stringify({ result: [], error: null }), 'utf8'),
+  );
+
+  await waitForCondition(() => processed.length === 1);
+  assert.deepEqual(findNotesQueries, ['"deck:My \\"Japanese\\" Deck" added:1']);
+  assert.deepEqual(processed, [501]);
+});
+
 test('proxy does not fallback-enqueue latest note for multi requests without add actions', async () => {
   const processed: number[] = [];
   const findNotesQueries: string[] = [];
diff --git a/src/anki-integration/anki-connect-proxy.ts b/src/anki-integration/anki-connect-proxy.ts
index 4d75df9..3f12fa6 100644
--- a/src/anki-integration/anki-connect-proxy.ts
+++ b/src/anki-integration/anki-connect-proxy.ts
@@ -233,7 +233,8 @@ export class AnkiConnectProxyServer {
     try {
       const deck = this.deps.getDeck ? this.deps.getDeck() : undefined;
-      const query = deck ? `"deck:${deck}" added:1` : 'added:1';
+      const escapedDeck = deck ? deck.replace(/"/g, '\\"') : null;
+      const query = escapedDeck ? `"deck:${escapedDeck}" added:1` : 'added:1';
       const noteIds = await findNotes(query, { maxRetries: 0 });
       if (!noteIds || noteIds.length === 0) {
         return;
       }
diff --git a/src/core/services/tokenizer/annotation-stage.ts b/src/core/services/tokenizer/annotation-stage.ts
index dde4e23..8e60fc5 100644
--- a/src/core/services/tokenizer/annotation-stage.ts
+++ b/src/core/services/tokenizer/annotation-stage.ts
@@ -141,6 +141,8 @@ function applyFrequencyMarking(
     return { ...token, frequencyRank: undefined };
   }
 
+  // Existing Yomitan frequency ranks take precedence over the local lookup.
+  // Keep this early return to preserve explicitly-supplied values.
   if (typeof token.frequencyRank === 'number' && Number.isFinite(token.frequencyRank)) {
     const rank = Math.max(1, Math.floor(token.frequencyRank));
     return { ...token, frequencyRank: rank };
diff --git a/src/token-merger.test.ts b/src/token-merger.test.ts
new file mode 100644
index 0000000..670e950
--- /dev/null
+++ b/src/token-merger.test.ts
@@ -0,0 +1,65 @@
+import assert from 'node:assert/strict';
+import test from 'node:test';
+import { MergedToken, PartOfSpeech } from './types';
+import { markNPlusOneTargets } from './token-merger';
+
+function makeToken(overrides: Partial<MergedToken> = {}): MergedToken {
+  return {
+    surface: '猫',
+    reading: 'ネコ',
+    headword: '猫',
+    startPos: 0,
+    endPos: 1,
+    partOfSpeech: PartOfSpeech.noun,
+    isMerged: false,
+    isKnown: false,
+    isNPlusOneTarget: false,
+    ...overrides,
+  };
+}
+
+test('markNPlusOneTargets counts only eligible tokens toward minSentenceWords', () => {
+  const tokens = [
+    makeToken({
+      surface: 'は',
+      headword: 'は',
+      reading: 'ハ',
+      partOfSpeech: PartOfSpeech.particle,
+      pos1: '助詞',
+      startPos: 0,
+      endPos: 1,
+    }),
+    makeToken({
+      surface: 'に',
+      headword: 'に',
+      reading: 'ニ',
+      partOfSpeech: PartOfSpeech.particle,
+      pos1: '助詞',
+      startPos: 1,
+      endPos: 2,
+    }),
+    makeToken({
+      surface: 'を',
+      headword: 'を',
+      reading: 'ヲ',
+      partOfSpeech: PartOfSpeech.particle,
+      pos1: '助詞',
+      startPos: 2,
+      endPos: 3,
+    }),
+    makeToken({
+      surface: '猫',
+      headword: '猫',
+      partOfSpeech: PartOfSpeech.noun,
+      startPos: 3,
+      endPos: 4,
+    }),
+  ];
+
+  const result = markNPlusOneTargets(tokens, 3);
+
+  assert.equal(result[0]?.isNPlusOneTarget, false);
+  assert.equal(result[1]?.isNPlusOneTarget, false);
+  assert.equal(result[2]?.isNPlusOneTarget, false);
+  assert.equal(result[3]?.isNPlusOneTarget, false);
+});