mirror of
https://github.com/ksyasuda/SubMiner.git
synced 2026-03-20 12:11:28 -07:00
Overlay 2.0 (#12)
This commit is contained in:
334
src/anki-integration/anki-connect-proxy.test.ts
Normal file
334
src/anki-integration/anki-connect-proxy.test.ts
Normal file
@@ -0,0 +1,334 @@
|
||||
import assert from 'node:assert/strict';
|
||||
import test from 'node:test';
|
||||
import { AnkiConnectProxyServer } from './anki-connect-proxy';
|
||||
|
||||
async function waitForCondition(
|
||||
condition: () => boolean,
|
||||
timeoutMs = 2000,
|
||||
intervalMs = 10,
|
||||
): Promise<void> {
|
||||
const startedAt = Date.now();
|
||||
while (Date.now() - startedAt < timeoutMs) {
|
||||
if (condition()) return;
|
||||
await new Promise((resolve) => setTimeout(resolve, intervalMs));
|
||||
}
|
||||
throw new Error('Timed out waiting for condition');
|
||||
}
|
||||
|
||||
test('proxy enqueues addNote result for enrichment', async () => {
|
||||
const processed: number[] = [];
|
||||
const proxy = new AnkiConnectProxyServer({
|
||||
shouldAutoUpdateNewCards: () => true,
|
||||
processNewCard: async (noteId) => {
|
||||
processed.push(noteId);
|
||||
},
|
||||
logInfo: () => undefined,
|
||||
logWarn: () => undefined,
|
||||
logError: () => undefined,
|
||||
});
|
||||
|
||||
(
|
||||
proxy as unknown as {
|
||||
maybeEnqueueFromRequest: (request: Record<string, unknown>, responseBody: Buffer) => void;
|
||||
}
|
||||
).maybeEnqueueFromRequest(
|
||||
{ action: 'addNote' },
|
||||
Buffer.from(JSON.stringify({ result: 42, error: null }), 'utf8'),
|
||||
);
|
||||
|
||||
await waitForCondition(() => processed.length === 1);
|
||||
assert.deepEqual(processed, [42]);
|
||||
});
|
||||
|
||||
test('proxy enqueues addNote bare numeric response for enrichment', async () => {
|
||||
const processed: number[] = [];
|
||||
const proxy = new AnkiConnectProxyServer({
|
||||
shouldAutoUpdateNewCards: () => true,
|
||||
processNewCard: async (noteId) => {
|
||||
processed.push(noteId);
|
||||
},
|
||||
logInfo: () => undefined,
|
||||
logWarn: () => undefined,
|
||||
logError: () => undefined,
|
||||
});
|
||||
|
||||
(
|
||||
proxy as unknown as {
|
||||
maybeEnqueueFromRequest: (request: Record<string, unknown>, responseBody: Buffer) => void;
|
||||
}
|
||||
).maybeEnqueueFromRequest({ action: 'addNote' }, Buffer.from('42', 'utf8'));
|
||||
|
||||
await waitForCondition(() => processed.length === 1);
|
||||
assert.deepEqual(processed, [42]);
|
||||
});
|
||||
|
||||
test('proxy de-duplicates addNotes IDs within the same response', async () => {
|
||||
const processed: number[] = [];
|
||||
const proxy = new AnkiConnectProxyServer({
|
||||
shouldAutoUpdateNewCards: () => true,
|
||||
processNewCard: async (noteId) => {
|
||||
processed.push(noteId);
|
||||
await new Promise((resolve) => setTimeout(resolve, 5));
|
||||
},
|
||||
logInfo: () => undefined,
|
||||
logWarn: () => undefined,
|
||||
logError: () => undefined,
|
||||
});
|
||||
|
||||
(
|
||||
proxy as unknown as {
|
||||
maybeEnqueueFromRequest: (request: Record<string, unknown>, responseBody: Buffer) => void;
|
||||
}
|
||||
).maybeEnqueueFromRequest(
|
||||
{ action: 'addNotes' },
|
||||
Buffer.from(JSON.stringify({ result: [101, 102, 101, null], error: null }), 'utf8'),
|
||||
);
|
||||
|
||||
await waitForCondition(() => processed.length === 2);
|
||||
assert.deepEqual(processed, [101, 102]);
|
||||
});
|
||||
|
||||
test('proxy enqueues note IDs from multi action addNote/addNotes results', async () => {
|
||||
const processed: number[] = [];
|
||||
const proxy = new AnkiConnectProxyServer({
|
||||
shouldAutoUpdateNewCards: () => true,
|
||||
processNewCard: async (noteId) => {
|
||||
processed.push(noteId);
|
||||
},
|
||||
logInfo: () => undefined,
|
||||
logWarn: () => undefined,
|
||||
logError: () => undefined,
|
||||
});
|
||||
|
||||
(
|
||||
proxy as unknown as {
|
||||
maybeEnqueueFromRequest: (request: Record<string, unknown>, responseBody: Buffer) => void;
|
||||
}
|
||||
).maybeEnqueueFromRequest(
|
||||
{
|
||||
action: 'multi',
|
||||
params: {
|
||||
actions: [{ action: 'version' }, { action: 'addNote' }, { action: 'addNotes' }],
|
||||
},
|
||||
},
|
||||
Buffer.from(JSON.stringify({ result: [6, 777, [888, 777, null]], error: null }), 'utf8'),
|
||||
);
|
||||
|
||||
await waitForCondition(() => processed.length === 2);
|
||||
assert.deepEqual(processed, [777, 888]);
|
||||
});
|
||||
|
||||
test('proxy enqueues note IDs from bare multi action results', async () => {
|
||||
const processed: number[] = [];
|
||||
const proxy = new AnkiConnectProxyServer({
|
||||
shouldAutoUpdateNewCards: () => true,
|
||||
processNewCard: async (noteId) => {
|
||||
processed.push(noteId);
|
||||
},
|
||||
logInfo: () => undefined,
|
||||
logWarn: () => undefined,
|
||||
logError: () => undefined,
|
||||
});
|
||||
|
||||
(
|
||||
proxy as unknown as {
|
||||
maybeEnqueueFromRequest: (request: Record<string, unknown>, responseBody: Buffer) => void;
|
||||
}
|
||||
).maybeEnqueueFromRequest(
|
||||
{
|
||||
action: 'multi',
|
||||
params: {
|
||||
actions: [{ action: 'version' }, { action: 'addNote' }, { action: 'addNotes' }],
|
||||
},
|
||||
},
|
||||
Buffer.from(JSON.stringify([6, 777, [888, null]]), 'utf8'),
|
||||
);
|
||||
|
||||
await waitForCondition(() => processed.length === 2);
|
||||
assert.deepEqual(processed, [777, 888]);
|
||||
});
|
||||
|
||||
test('proxy enqueues note IDs from multi action envelope results', async () => {
|
||||
const processed: number[] = [];
|
||||
const proxy = new AnkiConnectProxyServer({
|
||||
shouldAutoUpdateNewCards: () => true,
|
||||
processNewCard: async (noteId) => {
|
||||
processed.push(noteId);
|
||||
},
|
||||
logInfo: () => undefined,
|
||||
logWarn: () => undefined,
|
||||
logError: () => undefined,
|
||||
});
|
||||
|
||||
(
|
||||
proxy as unknown as {
|
||||
maybeEnqueueFromRequest: (request: Record<string, unknown>, responseBody: Buffer) => void;
|
||||
}
|
||||
).maybeEnqueueFromRequest(
|
||||
{
|
||||
action: 'multi',
|
||||
params: {
|
||||
actions: [{ action: 'version' }, { action: 'addNote' }, { action: 'addNotes' }],
|
||||
},
|
||||
},
|
||||
Buffer.from(
|
||||
JSON.stringify({
|
||||
result: [
|
||||
{ result: 6, error: null },
|
||||
{ result: 777, error: null },
|
||||
{ result: [888, 777, null], error: null },
|
||||
],
|
||||
error: null,
|
||||
}),
|
||||
'utf8',
|
||||
),
|
||||
);
|
||||
|
||||
await waitForCondition(() => processed.length === 2);
|
||||
assert.deepEqual(processed, [777, 888]);
|
||||
});
|
||||
|
||||
test('proxy skips auto-enrichment when auto-update is disabled', async () => {
|
||||
const processed: number[] = [];
|
||||
const proxy = new AnkiConnectProxyServer({
|
||||
shouldAutoUpdateNewCards: () => false,
|
||||
processNewCard: async (noteId) => {
|
||||
processed.push(noteId);
|
||||
},
|
||||
logInfo: () => undefined,
|
||||
logWarn: () => undefined,
|
||||
logError: () => undefined,
|
||||
});
|
||||
|
||||
(
|
||||
proxy as unknown as {
|
||||
maybeEnqueueFromRequest: (request: Record<string, unknown>, responseBody: Buffer) => void;
|
||||
}
|
||||
).maybeEnqueueFromRequest(
|
||||
{ action: 'addNote' },
|
||||
Buffer.from(JSON.stringify({ result: 303, error: null }), 'utf8'),
|
||||
);
|
||||
|
||||
await new Promise((resolve) => setTimeout(resolve, 30));
|
||||
assert.deepEqual(processed, []);
|
||||
});
|
||||
|
||||
test('proxy ignores addNote when upstream response reports error', async () => {
|
||||
const processed: number[] = [];
|
||||
const proxy = new AnkiConnectProxyServer({
|
||||
shouldAutoUpdateNewCards: () => true,
|
||||
processNewCard: async (noteId) => {
|
||||
processed.push(noteId);
|
||||
},
|
||||
logInfo: () => undefined,
|
||||
logWarn: () => undefined,
|
||||
logError: () => undefined,
|
||||
});
|
||||
|
||||
(
|
||||
proxy as unknown as {
|
||||
maybeEnqueueFromRequest: (request: Record<string, unknown>, responseBody: Buffer) => void;
|
||||
}
|
||||
).maybeEnqueueFromRequest(
|
||||
{ action: 'addNote' },
|
||||
Buffer.from(JSON.stringify({ result: 123, error: 'duplicate' }), 'utf8'),
|
||||
);
|
||||
|
||||
await new Promise((resolve) => setTimeout(resolve, 30));
|
||||
assert.deepEqual(processed, []);
|
||||
});
|
||||
|
||||
test('proxy does not fallback-enqueue latest note for multi requests without add actions', async () => {
|
||||
const processed: number[] = [];
|
||||
const findNotesQueries: string[] = [];
|
||||
const proxy = new AnkiConnectProxyServer({
|
||||
shouldAutoUpdateNewCards: () => true,
|
||||
processNewCard: async (noteId) => {
|
||||
processed.push(noteId);
|
||||
},
|
||||
getDeck: () => 'Mining',
|
||||
findNotes: async (query) => {
|
||||
findNotesQueries.push(query);
|
||||
return [999];
|
||||
},
|
||||
logInfo: () => undefined,
|
||||
logWarn: () => undefined,
|
||||
logError: () => undefined,
|
||||
});
|
||||
|
||||
(
|
||||
proxy as unknown as {
|
||||
maybeEnqueueFromRequest: (request: Record<string, unknown>, responseBody: Buffer) => void;
|
||||
}
|
||||
).maybeEnqueueFromRequest(
|
||||
{
|
||||
action: 'multi',
|
||||
params: {
|
||||
actions: [{ action: 'version' }, { action: 'deckNames' }],
|
||||
},
|
||||
},
|
||||
Buffer.from(JSON.stringify({ result: [6, ['Default']], error: null }), 'utf8'),
|
||||
);
|
||||
|
||||
await new Promise((resolve) => setTimeout(resolve, 30));
|
||||
assert.deepEqual(findNotesQueries, []);
|
||||
assert.deepEqual(processed, []);
|
||||
});
|
||||
|
||||
test('proxy fallback-enqueues latest note for addNote responses without note IDs and escapes deck quotes', async () => {
|
||||
const processed: number[] = [];
|
||||
const findNotesQueries: string[] = [];
|
||||
const proxy = new AnkiConnectProxyServer({
|
||||
shouldAutoUpdateNewCards: () => true,
|
||||
processNewCard: async (noteId) => {
|
||||
processed.push(noteId);
|
||||
},
|
||||
getDeck: () => 'My "Japanese" Deck',
|
||||
findNotes: async (query) => {
|
||||
findNotesQueries.push(query);
|
||||
return [500, 501];
|
||||
},
|
||||
logInfo: () => undefined,
|
||||
logWarn: () => undefined,
|
||||
logError: () => undefined,
|
||||
});
|
||||
|
||||
(
|
||||
proxy as unknown as {
|
||||
maybeEnqueueFromRequest: (request: Record<string, unknown>, responseBody: Buffer) => void;
|
||||
}
|
||||
).maybeEnqueueFromRequest(
|
||||
{ action: 'addNote' },
|
||||
Buffer.from(JSON.stringify({ result: 0, error: null }), 'utf8'),
|
||||
);
|
||||
|
||||
await waitForCondition(() => processed.length === 1);
|
||||
assert.deepEqual(findNotesQueries, ['"deck:My \\"Japanese\\" Deck" added:1']);
|
||||
assert.deepEqual(processed, [501]);
|
||||
});
|
||||
|
||||
test('proxy detects self-referential loop configuration', () => {
|
||||
const proxy = new AnkiConnectProxyServer({
|
||||
shouldAutoUpdateNewCards: () => true,
|
||||
processNewCard: async () => undefined,
|
||||
logInfo: () => undefined,
|
||||
logWarn: () => undefined,
|
||||
logError: () => undefined,
|
||||
});
|
||||
|
||||
const result = (
|
||||
proxy as unknown as {
|
||||
isSelfReferentialProxy: (options: {
|
||||
host: string;
|
||||
port: number;
|
||||
upstreamUrl: string;
|
||||
}) => boolean;
|
||||
}
|
||||
).isSelfReferentialProxy({
|
||||
host: '127.0.0.1',
|
||||
port: 8766,
|
||||
upstreamUrl: 'http://localhost:8766',
|
||||
});
|
||||
|
||||
assert.equal(result, true);
|
||||
});
|
||||
465
src/anki-integration/anki-connect-proxy.ts
Normal file
465
src/anki-integration/anki-connect-proxy.ts
Normal file
@@ -0,0 +1,465 @@
|
||||
import http, { IncomingMessage, ServerResponse } from 'node:http';
|
||||
import axios, { AxiosInstance } from 'axios';
|
||||
|
||||
/** Options accepted by {@link AnkiConnectProxyServer.start}. */
interface StartProxyOptions {
  /** Local interface the proxy binds to (e.g. "127.0.0.1"). */
  host: string;
  /** Local TCP port the proxy listens on. */
  port: number;
  /** Base URL of the real AnkiConnect server that requests are forwarded to. */
  upstreamUrl: string;
}
|
||||
|
||||
/**
 * AnkiConnect response envelope. A response is treated as successful only
 * when `error` is null/undefined (see extractSuccessfulResult).
 */
interface AnkiConnectEnvelope {
  /** Action payload (note ID, array of IDs, nested results for `multi`, …). */
  result: unknown;
  /** Failure description; null/undefined on success. */
  error: unknown;
}
|
||||
|
||||
/** Injected collaborators for {@link AnkiConnectProxyServer}. */
export interface AnkiConnectProxyServerDeps {
  /** Gate checked before enqueueing and again before each note is processed. */
  shouldAutoUpdateNewCards: () => boolean;
  /** Enriches a newly created note; invoked once per dequeued note ID. */
  processNewCard: (noteId: number) => Promise<void>;
  /** Deck name used to scope the latest-note fallback query, if configured. */
  getDeck?: () => string | undefined;
  /**
   * Runs an Anki note search; required for the latest-note fallback
   * (the proxy calls it with `maxRetries: 0`).
   */
  findNotes?: (
    query: string,
    options?: {
      maxRetries?: number;
    },
  ) => Promise<number[]>;
  /** Informational logging sink. */
  logInfo: (message: string, ...args: unknown[]) => void;
  /** Warning logging sink (forwarding/enrichment failures). */
  logWarn: (message: string, ...args: unknown[]) => void;
  /** Error logging sink (server errors, refused startup). */
  logError: (message: string, ...args: unknown[]) => void;
}
|
||||
|
||||
export class AnkiConnectProxyServer {
|
||||
private server: http.Server | null = null;
|
||||
private client: AxiosInstance;
|
||||
private pendingNoteIds: number[] = [];
|
||||
private pendingNoteIdSet = new Set<number>();
|
||||
private inFlightNoteIds = new Set<number>();
|
||||
private processingQueue = false;
|
||||
|
||||
constructor(private readonly deps: AnkiConnectProxyServerDeps) {
|
||||
this.client = axios.create({
|
||||
timeout: 15000,
|
||||
validateStatus: () => true,
|
||||
responseType: 'arraybuffer',
|
||||
});
|
||||
}
|
||||
|
||||
get isRunning(): boolean {
|
||||
return this.server !== null;
|
||||
}
|
||||
|
||||
start(options: StartProxyOptions): void {
|
||||
this.stop();
|
||||
|
||||
if (this.isSelfReferentialProxy(options)) {
|
||||
this.deps.logError(
|
||||
'[anki-proxy] Proxy upstream points to proxy host/port; refusing to start to avoid loop.',
|
||||
);
|
||||
return;
|
||||
}
|
||||
|
||||
this.server = http.createServer((req, res) => {
|
||||
void this.handleRequest(req, res, options.upstreamUrl);
|
||||
});
|
||||
|
||||
this.server.on('error', (error) => {
|
||||
this.deps.logError('[anki-proxy] Server error:', (error as Error).message);
|
||||
});
|
||||
|
||||
this.server.listen(options.port, options.host, () => {
|
||||
this.deps.logInfo(
|
||||
`[anki-proxy] Listening on http://${options.host}:${options.port} -> ${options.upstreamUrl}`,
|
||||
);
|
||||
});
|
||||
}
|
||||
|
||||
stop(): void {
|
||||
if (this.server) {
|
||||
this.server.close();
|
||||
this.server = null;
|
||||
this.deps.logInfo('[anki-proxy] Stopped');
|
||||
}
|
||||
this.pendingNoteIds = [];
|
||||
this.pendingNoteIdSet.clear();
|
||||
this.inFlightNoteIds.clear();
|
||||
this.processingQueue = false;
|
||||
}
|
||||
|
||||
private isSelfReferentialProxy(options: StartProxyOptions): boolean {
|
||||
try {
|
||||
const upstream = new URL(options.upstreamUrl);
|
||||
const normalizedUpstreamHost = upstream.hostname.toLowerCase();
|
||||
const normalizedBindHost = options.host.toLowerCase();
|
||||
const upstreamPort =
|
||||
upstream.port.length > 0
|
||||
? Number(upstream.port)
|
||||
: upstream.protocol === 'https:'
|
||||
? 443
|
||||
: 80;
|
||||
const hostMatches =
|
||||
normalizedUpstreamHost === normalizedBindHost ||
|
||||
(normalizedUpstreamHost === 'localhost' && normalizedBindHost === '127.0.0.1') ||
|
||||
(normalizedUpstreamHost === '127.0.0.1' && normalizedBindHost === 'localhost');
|
||||
return hostMatches && upstreamPort === options.port;
|
||||
} catch {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
private async handleRequest(
|
||||
req: IncomingMessage,
|
||||
res: ServerResponse<IncomingMessage>,
|
||||
upstreamUrl: string,
|
||||
): Promise<void> {
|
||||
this.setCorsHeaders(res);
|
||||
|
||||
if (req.method === 'OPTIONS') {
|
||||
res.statusCode = 204;
|
||||
res.end();
|
||||
return;
|
||||
}
|
||||
|
||||
if (!req.method || (req.method !== 'GET' && req.method !== 'POST')) {
|
||||
res.statusCode = 405;
|
||||
res.end('Method Not Allowed');
|
||||
return;
|
||||
}
|
||||
|
||||
let rawBody: Buffer = Buffer.alloc(0);
|
||||
if (req.method === 'POST') {
|
||||
rawBody = await this.readRequestBody(req);
|
||||
}
|
||||
|
||||
let requestJson: Record<string, unknown> | null = null;
|
||||
if (req.method === 'POST' && rawBody.length > 0) {
|
||||
requestJson = this.tryParseJson(rawBody);
|
||||
}
|
||||
|
||||
try {
|
||||
const targetUrl = new URL(req.url || '/', upstreamUrl).toString();
|
||||
const contentType =
|
||||
typeof req.headers['content-type'] === 'string'
|
||||
? req.headers['content-type']
|
||||
: 'application/json';
|
||||
const upstreamResponse = await this.client.request<ArrayBuffer>({
|
||||
url: targetUrl,
|
||||
method: req.method,
|
||||
data: req.method === 'POST' ? rawBody : undefined,
|
||||
headers: {
|
||||
'content-type': contentType,
|
||||
},
|
||||
});
|
||||
|
||||
const responseBody: Buffer = Buffer.isBuffer(upstreamResponse.data)
|
||||
? upstreamResponse.data
|
||||
: Buffer.from(new Uint8Array(upstreamResponse.data));
|
||||
this.copyUpstreamHeaders(res, upstreamResponse.headers as Record<string, unknown>);
|
||||
res.statusCode = upstreamResponse.status;
|
||||
res.end(responseBody);
|
||||
|
||||
if (req.method === 'POST') {
|
||||
this.maybeEnqueueFromRequest(requestJson, responseBody);
|
||||
}
|
||||
} catch (error) {
|
||||
this.deps.logWarn('[anki-proxy] Failed to forward request:', (error as Error).message);
|
||||
res.statusCode = 502;
|
||||
res.end('Bad Gateway');
|
||||
}
|
||||
}
|
||||
|
||||
private maybeEnqueueFromRequest(
|
||||
requestJson: Record<string, unknown> | null,
|
||||
responseBody: Buffer,
|
||||
): void {
|
||||
if (!requestJson || !this.deps.shouldAutoUpdateNewCards()) {
|
||||
return;
|
||||
}
|
||||
|
||||
const action =
|
||||
typeof requestJson.action === 'string'
|
||||
? requestJson.action
|
||||
: String(requestJson.action ?? '');
|
||||
if (action !== 'addNote' && action !== 'addNotes' && action !== 'multi') {
|
||||
return;
|
||||
}
|
||||
const shouldFallbackToLatestAdded = this.requestIncludesAddAction(action, requestJson);
|
||||
|
||||
const parsedResponse = this.tryParseJsonValue(responseBody);
|
||||
if (parsedResponse === null || parsedResponse === undefined) {
|
||||
return;
|
||||
}
|
||||
|
||||
const responseResult = this.extractSuccessfulResult(parsedResponse);
|
||||
if (responseResult === null) {
|
||||
return;
|
||||
}
|
||||
|
||||
const noteIds =
|
||||
action === 'multi'
|
||||
? this.collectMultiResultIds(requestJson, responseResult)
|
||||
: this.collectNoteIdsForAction(action, responseResult);
|
||||
if (noteIds.length === 0 && shouldFallbackToLatestAdded) {
|
||||
void this.enqueueMostRecentAddedNote();
|
||||
return;
|
||||
}
|
||||
|
||||
this.enqueueNotes(noteIds);
|
||||
}
|
||||
|
||||
private requestIncludesAddAction(action: string, requestJson: Record<string, unknown>): boolean {
|
||||
if (action === 'addNote' || action === 'addNotes') {
|
||||
return true;
|
||||
}
|
||||
if (action !== 'multi') {
|
||||
return false;
|
||||
}
|
||||
const params =
|
||||
requestJson.params && typeof requestJson.params === 'object'
|
||||
? (requestJson.params as Record<string, unknown>)
|
||||
: null;
|
||||
const actions = Array.isArray(params?.actions) ? params.actions : [];
|
||||
if (actions.length === 0) {
|
||||
return false;
|
||||
}
|
||||
return actions.some((entry) => {
|
||||
if (!entry || typeof entry !== 'object') return false;
|
||||
const actionName = (entry as Record<string, unknown>).action;
|
||||
return actionName === 'addNote' || actionName === 'addNotes';
|
||||
});
|
||||
}
|
||||
|
||||
private async enqueueMostRecentAddedNote(): Promise<void> {
|
||||
const findNotes = this.deps.findNotes;
|
||||
if (!findNotes) {
|
||||
return;
|
||||
}
|
||||
|
||||
try {
|
||||
const deck = this.deps.getDeck ? this.deps.getDeck() : undefined;
|
||||
const escapedDeck = deck ? deck.replace(/"/g, '\\"') : undefined;
|
||||
const query = escapedDeck ? `"deck:${escapedDeck}" added:1` : 'added:1';
|
||||
const noteIds = await findNotes(query, { maxRetries: 0 });
|
||||
if (!noteIds || noteIds.length === 0) {
|
||||
return;
|
||||
}
|
||||
const latestNoteId = Math.max(...noteIds);
|
||||
this.deps.logInfo(
|
||||
`[anki-proxy] Falling back to latest added note ${latestNoteId} (response did not include note IDs)`,
|
||||
);
|
||||
this.enqueueNotes([latestNoteId]);
|
||||
} catch (error) {
|
||||
this.deps.logWarn(
|
||||
'[anki-proxy] Failed latest-note fallback lookup:',
|
||||
(error as Error).message,
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
private collectNoteIdsForAction(action: string, result: unknown): number[] {
|
||||
if (action === 'addNote') {
|
||||
return this.collectSingleResultId(result);
|
||||
}
|
||||
if (action === 'addNotes') {
|
||||
return this.collectBatchResultIds(result);
|
||||
}
|
||||
return [];
|
||||
}
|
||||
|
||||
private collectMultiResultIds(requestJson: Record<string, unknown>, result: unknown): number[] {
|
||||
if (!Array.isArray(result)) {
|
||||
return [];
|
||||
}
|
||||
const params =
|
||||
requestJson.params && typeof requestJson.params === 'object'
|
||||
? (requestJson.params as Record<string, unknown>)
|
||||
: null;
|
||||
const actions = Array.isArray(params?.actions) ? params.actions : [];
|
||||
if (actions.length === 0) {
|
||||
return [];
|
||||
}
|
||||
|
||||
const noteIds: number[] = [];
|
||||
const count = Math.min(actions.length, result.length);
|
||||
for (let index = 0; index < count; index += 1) {
|
||||
const actionEntry = actions[index];
|
||||
if (!actionEntry || typeof actionEntry !== 'object') {
|
||||
continue;
|
||||
}
|
||||
const actionName =
|
||||
typeof (actionEntry as Record<string, unknown>).action === 'string'
|
||||
? ((actionEntry as Record<string, unknown>).action as string)
|
||||
: '';
|
||||
const actionResult = this.extractMultiActionResult(result[index]);
|
||||
if (actionResult === null) {
|
||||
continue;
|
||||
}
|
||||
noteIds.push(...this.collectNoteIdsForAction(actionName, actionResult));
|
||||
}
|
||||
return noteIds;
|
||||
}
|
||||
|
||||
private extractMultiActionResult(value: unknown): unknown | null {
|
||||
if (!value || typeof value !== 'object' || Array.isArray(value)) {
|
||||
return value;
|
||||
}
|
||||
|
||||
const envelope = value as Record<string, unknown>;
|
||||
if (!Object.prototype.hasOwnProperty.call(envelope, 'result')) {
|
||||
return value;
|
||||
}
|
||||
|
||||
if (envelope.error !== null && envelope.error !== undefined) {
|
||||
return null;
|
||||
}
|
||||
|
||||
return envelope.result;
|
||||
}
|
||||
|
||||
private collectSingleResultId(value: unknown): number[] {
|
||||
if (typeof value === 'number' && Number.isInteger(value) && value > 0) {
|
||||
return [value];
|
||||
}
|
||||
return [];
|
||||
}
|
||||
|
||||
private collectBatchResultIds(value: unknown): number[] {
|
||||
if (!Array.isArray(value)) {
|
||||
return [];
|
||||
}
|
||||
return value.filter((entry): entry is number => {
|
||||
return typeof entry === 'number' && Number.isInteger(entry) && entry > 0;
|
||||
});
|
||||
}
|
||||
|
||||
private enqueueNotes(noteIds: number[]): void {
|
||||
let enqueuedCount = 0;
|
||||
for (const noteId of noteIds) {
|
||||
if (this.pendingNoteIdSet.has(noteId) || this.inFlightNoteIds.has(noteId)) {
|
||||
continue;
|
||||
}
|
||||
this.pendingNoteIds.push(noteId);
|
||||
this.pendingNoteIdSet.add(noteId);
|
||||
enqueuedCount += 1;
|
||||
}
|
||||
|
||||
if (enqueuedCount === 0) {
|
||||
return;
|
||||
}
|
||||
|
||||
this.deps.logInfo(`[anki-proxy] Enqueued ${enqueuedCount} note(s) for enrichment`);
|
||||
this.processQueue();
|
||||
}
|
||||
|
||||
private processQueue(): void {
|
||||
if (this.processingQueue) {
|
||||
return;
|
||||
}
|
||||
this.processingQueue = true;
|
||||
|
||||
void (async () => {
|
||||
try {
|
||||
while (this.pendingNoteIds.length > 0) {
|
||||
const noteId = this.pendingNoteIds.shift();
|
||||
if (noteId === undefined) {
|
||||
continue;
|
||||
}
|
||||
this.pendingNoteIdSet.delete(noteId);
|
||||
|
||||
if (!this.deps.shouldAutoUpdateNewCards()) {
|
||||
continue;
|
||||
}
|
||||
|
||||
this.inFlightNoteIds.add(noteId);
|
||||
try {
|
||||
await this.deps.processNewCard(noteId);
|
||||
} catch (error) {
|
||||
this.deps.logWarn(
|
||||
`[anki-proxy] Failed to auto-enrich note ${noteId}:`,
|
||||
(error as Error).message,
|
||||
);
|
||||
} finally {
|
||||
this.inFlightNoteIds.delete(noteId);
|
||||
}
|
||||
}
|
||||
} finally {
|
||||
this.processingQueue = false;
|
||||
if (this.pendingNoteIds.length > 0) {
|
||||
this.processQueue();
|
||||
}
|
||||
}
|
||||
})();
|
||||
}
|
||||
|
||||
private async readRequestBody(req: IncomingMessage): Promise<Buffer> {
|
||||
const chunks: Buffer[] = [];
|
||||
for await (const chunk of req) {
|
||||
chunks.push(Buffer.isBuffer(chunk) ? chunk : Buffer.from(chunk));
|
||||
}
|
||||
return Buffer.concat(chunks);
|
||||
}
|
||||
|
||||
private tryParseJson(rawBody: Buffer): Record<string, unknown> | null {
|
||||
if (rawBody.length === 0) {
|
||||
return null;
|
||||
}
|
||||
try {
|
||||
const parsed = JSON.parse(rawBody.toString('utf8'));
|
||||
return parsed && typeof parsed === 'object' ? (parsed as Record<string, unknown>) : null;
|
||||
} catch {
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
||||
private tryParseJsonValue(rawBody: Buffer): unknown {
|
||||
if (rawBody.length === 0) {
|
||||
return null;
|
||||
}
|
||||
try {
|
||||
return JSON.parse(rawBody.toString('utf8'));
|
||||
} catch {
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
||||
private extractSuccessfulResult(value: unknown): unknown | null {
|
||||
if (!value || typeof value !== 'object' || Array.isArray(value)) {
|
||||
return value;
|
||||
}
|
||||
|
||||
const envelope = value as Partial<AnkiConnectEnvelope>;
|
||||
if (!Object.prototype.hasOwnProperty.call(envelope, 'result')) {
|
||||
return value;
|
||||
}
|
||||
if (envelope.error !== null && envelope.error !== undefined) {
|
||||
return null;
|
||||
}
|
||||
return envelope.result;
|
||||
}
|
||||
|
||||
private setCorsHeaders(res: ServerResponse<IncomingMessage>): void {
|
||||
res.setHeader('Access-Control-Allow-Origin', '*');
|
||||
res.setHeader('Access-Control-Allow-Headers', 'Content-Type');
|
||||
res.setHeader('Access-Control-Allow-Methods', 'POST, GET, OPTIONS');
|
||||
}
|
||||
|
||||
private copyUpstreamHeaders(
|
||||
res: ServerResponse<IncomingMessage>,
|
||||
headers: Record<string, unknown>,
|
||||
): void {
|
||||
for (const [key, value] of Object.entries(headers)) {
|
||||
if (value === undefined) {
|
||||
continue;
|
||||
}
|
||||
if (key.toLowerCase() === 'content-length') {
|
||||
continue;
|
||||
}
|
||||
if (Array.isArray(value)) {
|
||||
res.setHeader(
|
||||
key,
|
||||
value.map((entry) => String(entry)),
|
||||
);
|
||||
} else {
|
||||
res.setHeader(key, String(value));
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -89,7 +89,11 @@ test('findDuplicateNote checks both source expression/word values when both fiel
|
||||
if (query.includes('昨日は雨だった。')) {
|
||||
return [];
|
||||
}
|
||||
if (query.includes('"Word:雨"') || query.includes('"word:雨"') || query.includes('"Expression:雨"')) {
|
||||
if (
|
||||
query.includes('"Word:雨"') ||
|
||||
query.includes('"word:雨"') ||
|
||||
query.includes('"Expression:雨"')
|
||||
) {
|
||||
return [200];
|
||||
}
|
||||
return [];
|
||||
|
||||
@@ -32,9 +32,7 @@ export async function findDuplicateNote(
|
||||
);
|
||||
|
||||
const deckValue = deps.getDeck();
|
||||
const queryPrefixes = deckValue
|
||||
? [`"deck:${escapeAnkiSearchValue(deckValue)}" `, '']
|
||||
: [''];
|
||||
const queryPrefixes = deckValue ? [`"deck:${escapeAnkiSearchValue(deckValue)}" `, ''] : [''];
|
||||
|
||||
try {
|
||||
const noteIds = new Set<number>();
|
||||
|
||||
@@ -302,7 +302,7 @@ export class FieldGroupingMergeCollaborator {
|
||||
const unique: { groupId: number; content: string }[] = [];
|
||||
const seen = new Set<string>();
|
||||
for (const entry of entries) {
|
||||
const key = `${entry.groupId}::${entry.content}`;
|
||||
const key = entry.content;
|
||||
if (seen.has(key)) continue;
|
||||
seen.add(key);
|
||||
unique.push(entry);
|
||||
@@ -361,6 +361,10 @@ export class FieldGroupingMergeCollaborator {
|
||||
return ungrouped;
|
||||
}
|
||||
|
||||
private getPictureDedupKey(tag: string): string {
|
||||
return tag.replace(/\sdata-group-id="[^"]*"/gi, '').trim();
|
||||
}
|
||||
|
||||
private getStrictSpanGroupingFields(): Set<string> {
|
||||
const strictFields = new Set(this.strictGroupingFieldDefaults);
|
||||
const sentenceCardConfig = this.deps.getEffectiveSentenceCardConfig();
|
||||
@@ -394,11 +398,12 @@ export class FieldGroupingMergeCollaborator {
|
||||
const mergedTags = keepEntries.map((entry) =>
|
||||
this.ensureImageGroupId(entry.tag, entry.groupId),
|
||||
);
|
||||
const seen = new Set(mergedTags);
|
||||
const seen = new Set(mergedTags.map((tag) => this.getPictureDedupKey(tag)));
|
||||
for (const entry of sourceEntries) {
|
||||
const normalized = this.ensureImageGroupId(entry.tag, entry.groupId);
|
||||
if (seen.has(normalized)) continue;
|
||||
seen.add(normalized);
|
||||
const dedupKey = this.getPictureDedupKey(normalized);
|
||||
if (seen.has(dedupKey)) continue;
|
||||
seen.add(dedupKey);
|
||||
mergedTags.push(normalized);
|
||||
}
|
||||
return mergedTags.join('');
|
||||
@@ -415,9 +420,9 @@ export class FieldGroupingMergeCollaborator {
|
||||
.join('');
|
||||
}
|
||||
const merged = [...keepEntries];
|
||||
const seen = new Set(keepEntries.map((entry) => `${entry.groupId}::${entry.content}`));
|
||||
const seen = new Set(keepEntries.map((entry) => entry.content));
|
||||
for (const entry of sourceEntries) {
|
||||
const key = `${entry.groupId}::${entry.content}`;
|
||||
const key = entry.content;
|
||||
if (seen.has(key)) continue;
|
||||
seen.add(key);
|
||||
merged.push(entry);
|
||||
|
||||
@@ -1,16 +1,36 @@
|
||||
import test from 'node:test';
|
||||
import assert from 'node:assert/strict';
|
||||
import { FieldGroupingWorkflow } from './field-grouping-workflow';
|
||||
import type { KikuDuplicateCardInfo, KikuFieldGroupingChoice } from '../types';
|
||||
|
||||
type NoteInfo = {
|
||||
noteId: number;
|
||||
fields: Record<string, { value: string }>;
|
||||
};
|
||||
|
||||
type ManualChoice = {
|
||||
keepNoteId: number;
|
||||
deleteNoteId: number;
|
||||
deleteDuplicate: boolean;
|
||||
cancelled: boolean;
|
||||
};
|
||||
|
||||
type FieldGroupingCallback = (data: {
|
||||
original: KikuDuplicateCardInfo;
|
||||
duplicate: KikuDuplicateCardInfo;
|
||||
}) => Promise<KikuFieldGroupingChoice>;
|
||||
|
||||
function createWorkflowHarness() {
|
||||
const updates: Array<{ noteId: number; fields: Record<string, string> }> = [];
|
||||
const deleted: number[][] = [];
|
||||
const statuses: string[] = [];
|
||||
const mergeCalls: Array<{
|
||||
keepNoteId: number;
|
||||
deleteNoteId: number;
|
||||
keepNoteInfoNoteId: number;
|
||||
deleteNoteInfoNoteId: number;
|
||||
}> = [];
|
||||
let manualChoice: ManualChoice | null = null;
|
||||
|
||||
const deps = {
|
||||
client: {
|
||||
@@ -47,11 +67,28 @@ function createWorkflowHarness() {
|
||||
kikuDeleteDuplicateInAuto: true,
|
||||
}),
|
||||
getCurrentSubtitleText: () => 'subtitle-text',
|
||||
getFieldGroupingCallback: () => null,
|
||||
getFieldGroupingCallback: (): FieldGroupingCallback | null => {
|
||||
const choice = manualChoice;
|
||||
if (choice === null) return null;
|
||||
return async () => choice;
|
||||
},
|
||||
setFieldGroupingCallback: () => undefined,
|
||||
computeFieldGroupingMergedFields: async () => ({
|
||||
Sentence: 'merged sentence',
|
||||
}),
|
||||
computeFieldGroupingMergedFields: async (
|
||||
keepNoteId: number,
|
||||
deleteNoteId: number,
|
||||
keepNoteInfo: NoteInfo,
|
||||
deleteNoteInfo: NoteInfo,
|
||||
) => {
|
||||
mergeCalls.push({
|
||||
keepNoteId,
|
||||
deleteNoteId,
|
||||
keepNoteInfoNoteId: keepNoteInfo.noteId,
|
||||
deleteNoteInfoNoteId: deleteNoteInfo.noteId,
|
||||
});
|
||||
return {
|
||||
Sentence: 'merged sentence',
|
||||
};
|
||||
},
|
||||
extractFields: (fields: Record<string, { value: string }>) => {
|
||||
const out: Record<string, string> = {};
|
||||
for (const [key, value] of Object.entries(fields)) {
|
||||
@@ -77,6 +114,10 @@ function createWorkflowHarness() {
|
||||
updates,
|
||||
deleted,
|
||||
statuses,
|
||||
mergeCalls,
|
||||
setManualChoice: (choice: typeof manualChoice) => {
|
||||
manualChoice = choice;
|
||||
},
|
||||
deps,
|
||||
};
|
||||
}
|
||||
@@ -112,3 +153,31 @@ test('FieldGroupingWorkflow manual mode returns false when callback unavailable'
|
||||
assert.equal(handled, false);
|
||||
assert.equal(harness.updates.length, 0);
|
||||
});
|
||||
|
||||
test('FieldGroupingWorkflow manual keep-new uses new note as merge target and old note as source', async () => {
|
||||
const harness = createWorkflowHarness();
|
||||
harness.setManualChoice({
|
||||
keepNoteId: 2,
|
||||
deleteNoteId: 1,
|
||||
deleteDuplicate: false,
|
||||
cancelled: false,
|
||||
});
|
||||
|
||||
const handled = await harness.workflow.handleManual(1, 2, {
|
||||
noteId: 2,
|
||||
fields: {
|
||||
Expression: { value: 'word-2' },
|
||||
Sentence: { value: 'line-2' },
|
||||
},
|
||||
});
|
||||
|
||||
assert.equal(handled, true);
|
||||
assert.deepEqual(harness.mergeCalls, [
|
||||
{
|
||||
keepNoteId: 2,
|
||||
deleteNoteId: 1,
|
||||
keepNoteInfoNoteId: 2,
|
||||
deleteNoteInfoNoteId: 1,
|
||||
},
|
||||
]);
|
||||
});
|
||||
|
||||
@@ -69,7 +69,6 @@ export class FieldGroupingWorkflow {
|
||||
await this.performMerge(
|
||||
originalNoteId,
|
||||
newNoteId,
|
||||
newNoteInfo,
|
||||
this.getExpression(newNoteInfo),
|
||||
sentenceCardConfig.kikuDeleteDuplicateInAuto,
|
||||
);
|
||||
@@ -112,15 +111,8 @@ export class FieldGroupingWorkflow {
|
||||
|
||||
const keepNoteId = choice.keepNoteId;
|
||||
const deleteNoteId = choice.deleteNoteId;
|
||||
const deleteNoteInfo = deleteNoteId === newNoteId ? newNoteInfo : originalNoteInfo;
|
||||
|
||||
await this.performMerge(
|
||||
keepNoteId,
|
||||
deleteNoteId,
|
||||
deleteNoteInfo,
|
||||
expression,
|
||||
choice.deleteDuplicate,
|
||||
);
|
||||
await this.performMerge(keepNoteId, deleteNoteId, expression, choice.deleteDuplicate);
|
||||
return true;
|
||||
} catch (error) {
|
||||
this.deps.logError('Field grouping manual merge failed:', (error as Error).message);
|
||||
@@ -132,18 +124,22 @@ export class FieldGroupingWorkflow {
|
||||
private async performMerge(
|
||||
keepNoteId: number,
|
||||
deleteNoteId: number,
|
||||
deleteNoteInfo: FieldGroupingWorkflowNoteInfo,
|
||||
expression: string,
|
||||
deleteDuplicate = true,
|
||||
): Promise<void> {
|
||||
const keepNotesInfoResult = await this.deps.client.notesInfo([keepNoteId]);
|
||||
const keepNotesInfo = keepNotesInfoResult as FieldGroupingWorkflowNoteInfo[];
|
||||
if (!keepNotesInfo || keepNotesInfo.length === 0) {
|
||||
const notesInfoResult = await this.deps.client.notesInfo([keepNoteId, deleteNoteId]);
|
||||
const notesInfo = notesInfoResult as FieldGroupingWorkflowNoteInfo[];
|
||||
const keepNoteInfo = notesInfo.find((note) => note.noteId === keepNoteId);
|
||||
const deleteNoteInfo = notesInfo.find((note) => note.noteId === deleteNoteId);
|
||||
if (!keepNoteInfo) {
|
||||
this.deps.logInfo('Keep note not found:', keepNoteId);
|
||||
return;
|
||||
}
|
||||
if (!deleteNoteInfo) {
|
||||
this.deps.logInfo('Delete note not found:', deleteNoteId);
|
||||
return;
|
||||
}
|
||||
|
||||
const keepNoteInfo = keepNotesInfo[0]!;
|
||||
const mergedFields = await this.deps.computeFieldGroupingMergedFields(
|
||||
keepNoteId,
|
||||
deleteNoteId,
|
||||
|
||||
@@ -51,18 +51,10 @@ function createWorkflowHarness() {
|
||||
return out;
|
||||
},
|
||||
findDuplicateNote: async (_expression, _excludeNoteId, _noteInfo) => null,
|
||||
handleFieldGroupingAuto: async (
|
||||
_originalNoteId,
|
||||
_newNoteId,
|
||||
_newNoteInfo,
|
||||
_expression,
|
||||
) => undefined,
|
||||
handleFieldGroupingManual: async (
|
||||
_originalNoteId,
|
||||
_newNoteId,
|
||||
_newNoteInfo,
|
||||
_expression,
|
||||
) => false,
|
||||
handleFieldGroupingAuto: async (_originalNoteId, _newNoteId, _newNoteInfo, _expression) =>
|
||||
undefined,
|
||||
handleFieldGroupingManual: async (_originalNoteId, _newNoteId, _newNoteInfo, _expression) =>
|
||||
false,
|
||||
processSentence: (text: string, _noteFields: Record<string, string>) => text,
|
||||
resolveConfiguredFieldName: (noteInfo: NoteUpdateWorkflowNoteInfo, preferred?: string) => {
|
||||
if (!preferred) return null;
|
||||
|
||||
@@ -91,10 +91,14 @@ export class NoteUpdateWorkflow {
|
||||
this.deps.appendKnownWordsFromNoteInfo(noteInfo);
|
||||
const fields = this.deps.extractFields(noteInfo.fields);
|
||||
|
||||
const expressionText = fields.expression || fields.word || '';
|
||||
if (!expressionText) {
|
||||
this.deps.logWarn('No expression/word field found in card:', noteId);
|
||||
return;
|
||||
const expressionText = (fields.expression || fields.word || '').trim();
|
||||
const hasExpressionText = expressionText.length > 0;
|
||||
if (!hasExpressionText) {
|
||||
// Some note types omit Expression/Word; still run enrichment updates and skip duplicate checks.
|
||||
this.deps.logWarn(
|
||||
'No expression/word field found in card; skipping duplicate checks but continuing update:',
|
||||
noteId,
|
||||
);
|
||||
}
|
||||
|
||||
const sentenceCardConfig = this.deps.getEffectiveSentenceCardConfig();
|
||||
@@ -103,7 +107,7 @@ export class NoteUpdateWorkflow {
|
||||
sentenceCardConfig.kikuEnabled &&
|
||||
sentenceCardConfig.kikuFieldGrouping !== 'disabled';
|
||||
let duplicateNoteId: number | null = null;
|
||||
if (shouldRunFieldGrouping) {
|
||||
if (shouldRunFieldGrouping && hasExpressionText) {
|
||||
duplicateNoteId = await this.deps.findDuplicateNote(expressionText, noteId, noteInfo);
|
||||
}
|
||||
|
||||
@@ -195,11 +199,11 @@ export class NoteUpdateWorkflow {
|
||||
if (updatePerformed) {
|
||||
await this.deps.client.updateNoteFields(noteId, updatedFields);
|
||||
await this.deps.addConfiguredTagsToNote(noteId);
|
||||
this.deps.logInfo('Updated card fields for:', expressionText);
|
||||
await this.deps.showNotification(noteId, expressionText);
|
||||
this.deps.logInfo('Updated card fields for:', hasExpressionText ? expressionText : noteId);
|
||||
await this.deps.showNotification(noteId, hasExpressionText ? expressionText : noteId);
|
||||
}
|
||||
|
||||
if (shouldRunFieldGrouping && duplicateNoteId !== null) {
|
||||
if (shouldRunFieldGrouping && hasExpressionText && duplicateNoteId !== null) {
|
||||
let noteInfoForGrouping = noteInfo;
|
||||
if (updatePerformed) {
|
||||
const refreshedInfoResult = await this.deps.client.notesInfo([noteId]);
|
||||
|
||||
Reference in New Issue
Block a user