mirror of
https://github.com/ksyasuda/SubMiner.git
synced 2026-03-22 12:11:27 -07:00
feat(stats): add v1 immersion stats dashboard (#19)
This commit is contained in:
50
src/anki-connect.test.ts
Normal file
50
src/anki-connect.test.ts
Normal file
@@ -0,0 +1,50 @@
|
||||
import test from 'node:test';
|
||||
import assert from 'node:assert/strict';
|
||||
import { AnkiConnectClient } from './anki-connect';
|
||||
|
||||
test('AnkiConnectClient disables keep-alive agents to avoid stale socket retries', () => {
|
||||
const client = new AnkiConnectClient('http://127.0.0.1:8765') as unknown as {
|
||||
client: {
|
||||
defaults: {
|
||||
httpAgent?: { options?: { keepAlive?: boolean } };
|
||||
httpsAgent?: { options?: { keepAlive?: boolean } };
|
||||
};
|
||||
};
|
||||
};
|
||||
|
||||
assert.equal(client.client.defaults.httpAgent?.options?.keepAlive, false);
|
||||
assert.equal(client.client.defaults.httpsAgent?.options?.keepAlive, false);
|
||||
});
|
||||
|
||||
test('AnkiConnectClient includes action name in retry logs', async () => {
|
||||
const client = new AnkiConnectClient('http://127.0.0.1:8765') as unknown as {
|
||||
client: { post: (url: string, body: unknown, options: unknown) => Promise<unknown> };
|
||||
sleep: (ms: number) => Promise<void>;
|
||||
};
|
||||
let shouldFail = true;
|
||||
client.client = {
|
||||
post: async () => {
|
||||
if (shouldFail) {
|
||||
shouldFail = false;
|
||||
const error = Object.assign(new Error('socket hang up'), { code: 'ECONNRESET' });
|
||||
throw error;
|
||||
}
|
||||
return { data: { result: [], error: null } };
|
||||
},
|
||||
};
|
||||
client.sleep = async () => undefined;
|
||||
|
||||
const originalInfo = console.info;
|
||||
const messages: string[] = [];
|
||||
try {
|
||||
console.info = (...args: unknown[]) => {
|
||||
messages.push(args.map((value) => String(value)).join(' '));
|
||||
};
|
||||
|
||||
await (client as unknown as AnkiConnectClient).invoke('notesInfo', { notes: [1] });
|
||||
|
||||
assert.match(messages.join('\n'), /AnkiConnect notesInfo retry 1\/3 after 200ms delay/);
|
||||
} finally {
|
||||
console.info = originalInfo;
|
||||
}
|
||||
});
|
||||
@@ -43,7 +43,7 @@ export class AnkiConnectClient {
|
||||
|
||||
constructor(url: string) {
|
||||
const httpAgent = new http.Agent({
|
||||
keepAlive: true,
|
||||
keepAlive: false,
|
||||
keepAliveMsecs: 1000,
|
||||
maxSockets: 5,
|
||||
maxFreeSockets: 2,
|
||||
@@ -51,7 +51,7 @@ export class AnkiConnectClient {
|
||||
});
|
||||
|
||||
const httpsAgent = new https.Agent({
|
||||
keepAlive: true,
|
||||
keepAlive: false,
|
||||
keepAliveMsecs: 1000,
|
||||
maxSockets: 5,
|
||||
maxFreeSockets: 2,
|
||||
@@ -106,7 +106,7 @@ export class AnkiConnectClient {
|
||||
try {
|
||||
if (attempt > 0) {
|
||||
const delay = Math.min(this.backoffMs * Math.pow(2, attempt - 1), this.maxBackoffMs);
|
||||
log.info(`AnkiConnect retry ${attempt}/${maxRetries} after ${delay}ms delay`);
|
||||
log.info(`AnkiConnect ${action} retry ${attempt}/${maxRetries} after ${delay}ms delay`);
|
||||
await this.sleep(delay);
|
||||
}
|
||||
|
||||
|
||||
85
src/anki-field-config.ts
Normal file
85
src/anki-field-config.ts
Normal file
@@ -0,0 +1,85 @@
|
||||
import type { AnkiConnectConfig } from './types';
|
||||
|
||||
type NoteFieldValue = { value?: string } | string | null | undefined;
|
||||
|
||||
function normalizeFieldName(value: string | null | undefined): string | null {
|
||||
if (typeof value !== 'string') return null;
|
||||
const trimmed = value.trim();
|
||||
return trimmed.length > 0 ? trimmed : null;
|
||||
}
|
||||
|
||||
export function getConfiguredWordFieldName(config?: Pick<AnkiConnectConfig, 'fields'> | null): string {
|
||||
return normalizeFieldName(config?.fields?.word) ?? 'Expression';
|
||||
}
|
||||
|
||||
export function getConfiguredSentenceFieldName(
|
||||
config?: Pick<AnkiConnectConfig, 'fields'> | null,
|
||||
): string {
|
||||
return normalizeFieldName(config?.fields?.sentence) ?? 'Sentence';
|
||||
}
|
||||
|
||||
export function getConfiguredTranslationFieldName(
|
||||
config?: Pick<AnkiConnectConfig, 'fields'> | null,
|
||||
): string {
|
||||
return normalizeFieldName(config?.fields?.translation) ?? 'SelectionText';
|
||||
}
|
||||
|
||||
export function getConfiguredWordFieldCandidates(
|
||||
config?: Pick<AnkiConnectConfig, 'fields'> | null,
|
||||
): string[] {
|
||||
const preferred = getConfiguredWordFieldName(config);
|
||||
const candidates = [preferred, 'Expression', 'Word'];
|
||||
const seen = new Set<string>();
|
||||
return candidates.filter((candidate) => {
|
||||
const key = candidate.toLowerCase();
|
||||
if (seen.has(key)) return false;
|
||||
seen.add(key);
|
||||
return true;
|
||||
});
|
||||
}
|
||||
|
||||
function coerceFieldValue(value: NoteFieldValue): string {
|
||||
if (typeof value === 'string') return value;
|
||||
if (value && typeof value === 'object' && typeof value.value === 'string') {
|
||||
return value.value;
|
||||
}
|
||||
return '';
|
||||
}
|
||||
|
||||
export function stripAnkiFieldHtml(value: string): string {
|
||||
return value
|
||||
.replace(/\[sound:[^\]]+\]/gi, ' ')
|
||||
.replace(/<br\s*\/?>/gi, ' ')
|
||||
.replace(/<[^>]+>/g, ' ')
|
||||
.replace(/ /gi, ' ')
|
||||
.replace(/\s+/g, ' ')
|
||||
.trim();
|
||||
}
|
||||
|
||||
export function getPreferredNoteFieldValue(
|
||||
fields: Record<string, NoteFieldValue> | null | undefined,
|
||||
preferredNames: string[],
|
||||
): string {
|
||||
if (!fields) return '';
|
||||
const entries = Object.entries(fields);
|
||||
for (const preferredName of preferredNames) {
|
||||
const preferredKey = preferredName.trim().toLowerCase();
|
||||
if (!preferredKey) continue;
|
||||
const entry = entries.find(([fieldName]) => fieldName.trim().toLowerCase() === preferredKey);
|
||||
if (!entry) continue;
|
||||
const cleaned = stripAnkiFieldHtml(coerceFieldValue(entry[1]));
|
||||
if (cleaned) return cleaned;
|
||||
}
|
||||
return '';
|
||||
}
|
||||
|
||||
export function getPreferredWordValueFromExtractedFields(
|
||||
fields: Record<string, string>,
|
||||
config?: Pick<AnkiConnectConfig, 'fields'> | null,
|
||||
): string {
|
||||
for (const candidate of getConfiguredWordFieldCandidates(config)) {
|
||||
const value = fields[candidate.toLowerCase()]?.trim();
|
||||
if (value) return value;
|
||||
}
|
||||
return '';
|
||||
}
|
||||
@@ -56,7 +56,7 @@ function createIntegrationTestContext(
|
||||
|
||||
const integration = new AnkiIntegration(
|
||||
{
|
||||
nPlusOne: {
|
||||
knownWords: {
|
||||
highlightEnabled: options.highlightEnabled ?? true,
|
||||
},
|
||||
},
|
||||
@@ -209,6 +209,27 @@ test('AnkiIntegration.refreshKnownWordCache deduplicates concurrent refreshes',
|
||||
}
|
||||
});
|
||||
|
||||
test('AnkiIntegration resolves merged-away note ids to the kept note id', () => {
|
||||
const ctx = createIntegrationTestContext({
|
||||
stateDirPrefix: 'subminer-anki-integration-note-redirect-',
|
||||
});
|
||||
|
||||
try {
|
||||
const integrationWithInternals = ctx.integration as unknown as {
|
||||
rememberMergedNoteIds: (deletedNoteId: number, keptNoteId: number) => void;
|
||||
};
|
||||
integrationWithInternals.rememberMergedNoteIds(111, 222);
|
||||
integrationWithInternals.rememberMergedNoteIds(222, 333);
|
||||
|
||||
assert.equal(ctx.integration.resolveCurrentNoteId(111), 333);
|
||||
assert.equal(ctx.integration.resolveCurrentNoteId(222), 333);
|
||||
assert.equal(ctx.integration.resolveCurrentNoteId(333), 333);
|
||||
assert.equal(ctx.integration.resolveCurrentNoteId(444), 444);
|
||||
} finally {
|
||||
cleanupIntegrationTestContext(ctx);
|
||||
}
|
||||
});
|
||||
|
||||
test('AnkiIntegration does not allocate proxy server when proxy transport is disabled', () => {
|
||||
const integration = new AnkiIntegration(
|
||||
{
|
||||
@@ -229,6 +250,34 @@ test('AnkiIntegration does not allocate proxy server when proxy transport is dis
|
||||
assert.equal(privateState.runtime.proxyServer, null);
|
||||
});
|
||||
|
||||
test('AnkiIntegration marks partial update notifications as failures in OSD mode', async () => {
|
||||
const osdMessages: string[] = [];
|
||||
const integration = new AnkiIntegration(
|
||||
{
|
||||
behavior: {
|
||||
notificationType: 'osd',
|
||||
},
|
||||
},
|
||||
{} as never,
|
||||
{} as never,
|
||||
(text) => {
|
||||
osdMessages.push(text);
|
||||
},
|
||||
);
|
||||
|
||||
await (
|
||||
integration as unknown as {
|
||||
showNotification: (
|
||||
noteId: number,
|
||||
label: string | number,
|
||||
errorSuffix?: string,
|
||||
) => Promise<void>;
|
||||
}
|
||||
).showNotification(42, 'taberu', 'image failed');
|
||||
|
||||
assert.deepEqual(osdMessages, ['x Updated card: taberu (image failed)']);
|
||||
});
|
||||
|
||||
test('FieldGroupingMergeCollaborator synchronizes ExpressionAudio from merged SentenceAudio', async () => {
|
||||
const collaborator = createFieldGroupingMergeCollaborator();
|
||||
|
||||
|
||||
@@ -31,12 +31,19 @@ import {
|
||||
NPlusOneMatchMode,
|
||||
} from './types';
|
||||
import { DEFAULT_ANKI_CONNECT_CONFIG } from './config';
|
||||
import {
|
||||
getConfiguredWordFieldCandidates,
|
||||
getConfiguredWordFieldName,
|
||||
getPreferredWordValueFromExtractedFields,
|
||||
} from './anki-field-config';
|
||||
import { createLogger } from './logger';
|
||||
import {
|
||||
createUiFeedbackState,
|
||||
beginUpdateProgress,
|
||||
clearUpdateProgress,
|
||||
endUpdateProgress,
|
||||
showStatusNotification,
|
||||
showUpdateResult,
|
||||
withUpdateProgress,
|
||||
UiFeedbackState,
|
||||
} from './anki-integration/ui-feedback';
|
||||
@@ -49,6 +56,7 @@ import { FieldGroupingService } from './anki-integration/field-grouping';
|
||||
import { FieldGroupingMergeCollaborator } from './anki-integration/field-grouping-merge';
|
||||
import { NoteUpdateWorkflow } from './anki-integration/note-update-workflow';
|
||||
import { FieldGroupingWorkflow } from './anki-integration/field-grouping-workflow';
|
||||
import { resolveAnimatedImageLeadInSeconds } from './anki-integration/animated-image-sync';
|
||||
import { AnkiIntegrationRuntime, normalizeAnkiIntegrationConfig } from './anki-integration/runtime';
|
||||
|
||||
const log = createLogger('anki').child('integration');
|
||||
@@ -137,6 +145,8 @@ export class AnkiIntegration {
|
||||
private fieldGroupingWorkflow: FieldGroupingWorkflow;
|
||||
private runtime: AnkiIntegrationRuntime;
|
||||
private aiConfig: AiConfig;
|
||||
private recordCardsMinedCallback: ((count: number, noteIds?: number[]) => void) | null = null;
|
||||
private noteIdRedirects = new Map<number, number>();
|
||||
|
||||
constructor(
|
||||
config: AnkiConnectConfig,
|
||||
@@ -150,6 +160,7 @@ export class AnkiIntegration {
|
||||
}) => Promise<KikuFieldGroupingChoice>,
|
||||
knownWordCacheStatePath?: string,
|
||||
aiConfig: AiConfig = {},
|
||||
recordCardsMined?: (count: number, noteIds?: number[]) => void,
|
||||
) {
|
||||
this.config = normalizeAnkiIntegrationConfig(config);
|
||||
this.aiConfig = { ...aiConfig };
|
||||
@@ -160,6 +171,7 @@ export class AnkiIntegration {
|
||||
this.osdCallback = osdCallback || null;
|
||||
this.notificationCallback = notificationCallback || null;
|
||||
this.fieldGroupingCallback = fieldGroupingCallback || null;
|
||||
this.recordCardsMinedCallback = recordCardsMined ?? null;
|
||||
this.knownWordCache = this.createKnownWordCache(knownWordCacheStatePath);
|
||||
this.pollingRunner = this.createPollingRunner();
|
||||
this.cardCreationService = this.createCardCreationService();
|
||||
@@ -181,12 +193,31 @@ export class AnkiIntegration {
|
||||
this.resolveNoteFieldName(noteInfo, preferredName),
|
||||
extractFields: (fields) => this.extractFields(fields),
|
||||
processSentence: (mpvSentence, noteFields) => this.processSentence(mpvSentence, noteFields),
|
||||
generateMediaForMerge: () => this.generateMediaForMerge(),
|
||||
generateMediaForMerge: (noteInfo) => this.generateMediaForMerge(noteInfo),
|
||||
warnFieldParseOnce: (fieldName, reason, detail) =>
|
||||
this.warnFieldParseOnce(fieldName, reason, detail),
|
||||
});
|
||||
}
|
||||
|
||||
private recordCardsMinedSafely(
|
||||
count: number,
|
||||
noteIds: number[] | undefined,
|
||||
source: string,
|
||||
): void {
|
||||
if (!this.recordCardsMinedCallback) {
|
||||
return;
|
||||
}
|
||||
|
||||
try {
|
||||
this.recordCardsMinedCallback(count, noteIds);
|
||||
} catch (error) {
|
||||
log.warn(
|
||||
`recordCardsMined callback failed during ${source}:`,
|
||||
(error as Error).message,
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
private createKnownWordCache(knownWordCacheStatePath?: string): KnownWordCacheManager {
|
||||
return new KnownWordCacheManager({
|
||||
client: {
|
||||
@@ -208,6 +239,9 @@ export class AnkiIntegration {
|
||||
(await this.client.findNotes(query, options)) as number[],
|
||||
shouldAutoUpdateNewCards: () => this.config.behavior?.autoUpdateNewCards !== false,
|
||||
processNewCard: (noteId) => this.processNewCard(noteId),
|
||||
recordCardsAdded: (count, noteIds) => {
|
||||
this.recordCardsMinedSafely(count, noteIds, 'polling');
|
||||
},
|
||||
isUpdateInProgress: () => this.updateInProgress,
|
||||
setUpdateInProgress: (value) => {
|
||||
this.updateInProgress = value;
|
||||
@@ -229,6 +263,9 @@ export class AnkiIntegration {
|
||||
return new AnkiConnectProxyServer({
|
||||
shouldAutoUpdateNewCards: () => this.config.behavior?.autoUpdateNewCards !== false,
|
||||
processNewCard: (noteId: number) => this.processNewCard(noteId),
|
||||
recordCardsAdded: (count, noteIds) => {
|
||||
this.recordCardsMinedSafely(count, noteIds, 'proxy');
|
||||
},
|
||||
getDeck: () => this.config.deck,
|
||||
findNotes: async (query, options) =>
|
||||
(await this.client.findNotes(query, options)) as number[],
|
||||
@@ -271,6 +308,7 @@ export class AnkiIntegration {
|
||||
storeMediaFile: (filename, data) => this.client.storeMediaFile(filename, data),
|
||||
findNotes: async (query, options) =>
|
||||
(await this.client.findNotes(query, options)) as number[],
|
||||
retrieveMediaFile: (filename) => this.client.retrieveMediaFile(filename),
|
||||
},
|
||||
mediaGenerator: {
|
||||
generateAudio: (videoPath, startTime, endTime, audioPadding, audioStreamIndex) =>
|
||||
@@ -293,6 +331,8 @@ export class AnkiIntegration {
|
||||
),
|
||||
},
|
||||
showOsdNotification: (text: string) => this.showOsdNotification(text),
|
||||
showUpdateResult: (message: string, success: boolean) =>
|
||||
this.showUpdateResult(message, success),
|
||||
showStatusNotification: (message: string) => this.showStatusNotification(message),
|
||||
showNotification: (noteId, label, errorSuffix) =>
|
||||
this.showNotification(noteId, label, errorSuffix),
|
||||
@@ -304,6 +344,7 @@ export class AnkiIntegration {
|
||||
this.resolveConfiguredFieldName(noteInfo, ...preferredNames),
|
||||
resolveNoteFieldName: (noteInfo, preferredName) =>
|
||||
this.resolveNoteFieldName(noteInfo, preferredName),
|
||||
getAnimatedImageLeadInSeconds: (noteInfo) => this.getAnimatedImageLeadInSeconds(noteInfo),
|
||||
extractFields: (fields) => this.extractFields(fields),
|
||||
processSentence: (mpvSentence, noteFields) => this.processSentence(mpvSentence, noteFields),
|
||||
setCardTypeFields: (updatedFields, availableFieldNames, cardKind) =>
|
||||
@@ -322,12 +363,16 @@ export class AnkiIntegration {
|
||||
trackLastAddedNoteId: (noteId) => {
|
||||
this.previousNoteIds.add(noteId);
|
||||
},
|
||||
recordCardsMinedCallback: (count, noteIds) => {
|
||||
this.recordCardsMinedSafely(count, noteIds, 'card creation');
|
||||
},
|
||||
});
|
||||
}
|
||||
|
||||
private createFieldGroupingService(): FieldGroupingService {
|
||||
return new FieldGroupingService({
|
||||
getEffectiveSentenceCardConfig: () => this.getEffectiveSentenceCardConfig(),
|
||||
getConfig: () => this.config,
|
||||
isUpdateInProgress: () => this.updateInProgress,
|
||||
getDeck: () => this.config.deck,
|
||||
withUpdateProgress: <T>(initialMessage: string, action: () => Promise<T>) =>
|
||||
@@ -391,12 +436,13 @@ export class AnkiIntegration {
|
||||
this.resolveConfiguredFieldName(noteInfo, ...preferredNames),
|
||||
getResolvedSentenceAudioFieldName: (noteInfo) =>
|
||||
this.getResolvedSentenceAudioFieldName(noteInfo),
|
||||
getAnimatedImageLeadInSeconds: (noteInfo) => this.getAnimatedImageLeadInSeconds(noteInfo),
|
||||
mergeFieldValue: (existing, newValue, overwrite) =>
|
||||
this.mergeFieldValue(existing, newValue, overwrite),
|
||||
generateAudioFilename: () => this.generateAudioFilename(),
|
||||
generateAudio: () => this.generateAudio(),
|
||||
generateImageFilename: () => this.generateImageFilename(),
|
||||
generateImage: () => this.generateImage(),
|
||||
generateImage: (animatedLeadInSeconds) => this.generateImage(animatedLeadInSeconds),
|
||||
formatMiscInfoPattern: (fallbackFilename, startTimeSeconds) =>
|
||||
this.formatMiscInfoPattern(fallbackFilename, startTimeSeconds),
|
||||
addConfiguredTagsToNote: (noteId) => this.addConfiguredTagsToNote(noteId),
|
||||
@@ -442,6 +488,9 @@ export class AnkiIntegration {
|
||||
removeTrackedNoteId: (noteId) => {
|
||||
this.previousNoteIds.delete(noteId);
|
||||
},
|
||||
rememberMergedNoteIds: (deletedNoteId, keptNoteId) => {
|
||||
this.rememberMergedNoteIds(deletedNoteId, keptNoteId);
|
||||
},
|
||||
showStatusNotification: (message) => this.showStatusNotification(message),
|
||||
showNotification: (noteId, label) => this.showNotification(noteId, label),
|
||||
showOsdNotification: (message) => this.showOsdNotification(message),
|
||||
@@ -456,11 +505,11 @@ export class AnkiIntegration {
|
||||
}
|
||||
|
||||
getKnownWordMatchMode(): NPlusOneMatchMode {
|
||||
return this.config.nPlusOne?.matchMode ?? DEFAULT_ANKI_CONNECT_CONFIG.nPlusOne.matchMode;
|
||||
return this.config.knownWords?.matchMode ?? DEFAULT_ANKI_CONNECT_CONFIG.knownWords.matchMode;
|
||||
}
|
||||
|
||||
private isKnownWordCacheEnabled(): boolean {
|
||||
return this.config.nPlusOne?.highlightEnabled === true;
|
||||
return this.config.knownWords?.highlightEnabled === true;
|
||||
}
|
||||
|
||||
private getConfiguredAnkiTags(): string[] {
|
||||
@@ -618,7 +667,7 @@ export class AnkiIntegration {
|
||||
);
|
||||
}
|
||||
|
||||
private async generateImage(): Promise<Buffer | null> {
|
||||
private async generateImage(animatedLeadInSeconds = 0): Promise<Buffer | null> {
|
||||
if (!this.mpvClient || !this.mpvClient.currentVideoPath) {
|
||||
return null;
|
||||
}
|
||||
@@ -646,6 +695,7 @@ export class AnkiIntegration {
|
||||
maxWidth: this.config.media?.animatedMaxWidth,
|
||||
maxHeight: this.config.media?.animatedMaxHeight,
|
||||
crf: this.config.media?.animatedCrf,
|
||||
leadingStillDuration: animatedLeadInSeconds,
|
||||
},
|
||||
);
|
||||
} else {
|
||||
@@ -749,6 +799,12 @@ export class AnkiIntegration {
|
||||
});
|
||||
}
|
||||
|
||||
private clearUpdateProgress(): void {
|
||||
clearUpdateProgress(this.uiFeedbackState, (timer) => {
|
||||
clearInterval(timer);
|
||||
});
|
||||
}
|
||||
|
||||
private async withUpdateProgress<T>(
|
||||
initialMessage: string,
|
||||
action: () => Promise<T>,
|
||||
@@ -879,7 +935,9 @@ export class AnkiIntegration {
|
||||
const type = this.config.behavior?.notificationType || 'osd';
|
||||
|
||||
if (type === 'osd' || type === 'both') {
|
||||
this.showOsdNotification(message);
|
||||
this.showUpdateResult(message, errorSuffix === undefined);
|
||||
} else {
|
||||
this.clearUpdateProgress();
|
||||
}
|
||||
|
||||
if ((type === 'system' || type === 'both') && this.notificationCallback) {
|
||||
@@ -914,6 +972,21 @@ export class AnkiIntegration {
|
||||
}
|
||||
}
|
||||
|
||||
private showUpdateResult(message: string, success: boolean): void {
|
||||
showUpdateResult(
|
||||
this.uiFeedbackState,
|
||||
{
|
||||
clearProgressTimer: (timer) => {
|
||||
clearInterval(timer);
|
||||
},
|
||||
showOsdNotification: (text) => {
|
||||
this.showOsdNotification(text);
|
||||
},
|
||||
},
|
||||
{ message, success },
|
||||
);
|
||||
}
|
||||
|
||||
private mergeFieldValue(existing: string, newValue: string, overwrite: boolean): string {
|
||||
if (overwrite || !existing.trim()) {
|
||||
return newValue;
|
||||
@@ -963,6 +1036,7 @@ export class AnkiIntegration {
|
||||
findNotes: async (query, options) => (await this.client.findNotes(query, options)) as unknown,
|
||||
notesInfo: async (noteIds) => (await this.client.notesInfo(noteIds)) as unknown,
|
||||
getDeck: () => this.config.deck,
|
||||
getWordFieldCandidates: () => this.getConfiguredWordFieldCandidates(),
|
||||
resolveFieldName: (info, preferredName) => this.resolveNoteFieldName(info, preferredName),
|
||||
logInfo: (message) => {
|
||||
log.info(message);
|
||||
@@ -988,7 +1062,26 @@ export class AnkiIntegration {
|
||||
);
|
||||
}
|
||||
|
||||
private async generateMediaForMerge(): Promise<{
|
||||
private getConfiguredWordFieldName(): string {
|
||||
return getConfiguredWordFieldName(this.config);
|
||||
}
|
||||
|
||||
private getConfiguredWordFieldCandidates(): string[] {
|
||||
return getConfiguredWordFieldCandidates(this.config);
|
||||
}
|
||||
|
||||
private async getAnimatedImageLeadInSeconds(noteInfo: NoteInfo): Promise<number> {
|
||||
return resolveAnimatedImageLeadInSeconds({
|
||||
config: this.config,
|
||||
noteInfo,
|
||||
resolveConfiguredFieldName: (candidateNoteInfo, ...preferredNames) =>
|
||||
this.resolveConfiguredFieldName(candidateNoteInfo, ...preferredNames),
|
||||
retrieveMediaFileBase64: (filename) => this.client.retrieveMediaFile(filename),
|
||||
logWarn: (message, ...args) => log.warn(message, ...args),
|
||||
});
|
||||
}
|
||||
|
||||
private async generateMediaForMerge(noteInfo?: NoteInfo): Promise<{
|
||||
audioField?: string;
|
||||
audioValue?: string;
|
||||
imageField?: string;
|
||||
@@ -1025,8 +1118,11 @@ export class AnkiIntegration {
|
||||
|
||||
if (this.config.media?.generateImage && this.mpvClient?.currentVideoPath) {
|
||||
try {
|
||||
const animatedLeadInSeconds = noteInfo
|
||||
? await this.getAnimatedImageLeadInSeconds(noteInfo)
|
||||
: 0;
|
||||
const imageFilename = this.generateImageFilename();
|
||||
const imageBuffer = await this.generateImage();
|
||||
const imageBuffer = await this.generateImage(animatedLeadInSeconds);
|
||||
if (imageBuffer) {
|
||||
await this.client.storeMediaFile(imageFilename, imageBuffer);
|
||||
result.imageField = this.config.fields?.image || DEFAULT_ANKI_CONNECT_CONFIG.fields.image;
|
||||
@@ -1112,4 +1208,38 @@ export class AnkiIntegration {
|
||||
this.stop();
|
||||
this.mediaGenerator.cleanup();
|
||||
}
|
||||
|
||||
setRecordCardsMinedCallback(
|
||||
callback: ((count: number, noteIds?: number[]) => void) | null,
|
||||
): void {
|
||||
this.recordCardsMinedCallback = callback;
|
||||
}
|
||||
|
||||
resolveCurrentNoteId(noteId: number): number {
|
||||
let resolved = noteId;
|
||||
const seen = new Set<number>();
|
||||
while (this.noteIdRedirects.has(resolved) && !seen.has(resolved)) {
|
||||
seen.add(resolved);
|
||||
resolved = this.noteIdRedirects.get(resolved)!;
|
||||
}
|
||||
return resolved;
|
||||
}
|
||||
|
||||
private rememberMergedNoteIds(deletedNoteId: number, keptNoteId: number): void {
|
||||
const resolvedKeepNoteId = this.resolveCurrentNoteId(keptNoteId);
|
||||
const visited = new Set<number>([deletedNoteId]);
|
||||
let current = deletedNoteId;
|
||||
|
||||
while (true) {
|
||||
this.noteIdRedirects.set(current, resolvedKeepNoteId);
|
||||
const next = Array.from(this.noteIdRedirects.entries()).find(
|
||||
([, targetNoteId]) => targetNoteId === current,
|
||||
)?.[0];
|
||||
if (next === undefined || visited.has(next)) {
|
||||
break;
|
||||
}
|
||||
visited.add(next);
|
||||
current = next;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
82
src/anki-integration/animated-image-sync.test.ts
Normal file
82
src/anki-integration/animated-image-sync.test.ts
Normal file
@@ -0,0 +1,82 @@
|
||||
import assert from 'node:assert/strict';
|
||||
import test from 'node:test';
|
||||
|
||||
import { resolveAnimatedImageLeadInSeconds, extractSoundFilenames } from './animated-image-sync';
|
||||
|
||||
test('extractSoundFilenames returns ordered sound filenames from an Anki field value', () => {
|
||||
assert.deepEqual(
|
||||
extractSoundFilenames('before [sound:word.mp3] middle [sound:alt.ogg] after'),
|
||||
['word.mp3', 'alt.ogg'],
|
||||
);
|
||||
});
|
||||
|
||||
test('resolveAnimatedImageLeadInSeconds sums configured word audio durations for animated images', async () => {
|
||||
const leadInSeconds = await resolveAnimatedImageLeadInSeconds({
|
||||
config: {
|
||||
fields: {
|
||||
audio: 'ExpressionAudio',
|
||||
},
|
||||
media: {
|
||||
imageType: 'avif',
|
||||
syncAnimatedImageToWordAudio: true,
|
||||
},
|
||||
},
|
||||
noteInfo: {
|
||||
noteId: 42,
|
||||
fields: {
|
||||
ExpressionAudio: {
|
||||
value: '[sound:word.mp3][sound:alt.ogg]',
|
||||
},
|
||||
},
|
||||
},
|
||||
resolveConfiguredFieldName: (noteInfo, ...preferredNames) => {
|
||||
for (const preferredName of preferredNames) {
|
||||
if (!preferredName) continue;
|
||||
const resolved = Object.keys(noteInfo.fields).find(
|
||||
(fieldName) => fieldName.toLowerCase() === preferredName.toLowerCase(),
|
||||
);
|
||||
if (resolved) return resolved;
|
||||
}
|
||||
return null;
|
||||
},
|
||||
retrieveMediaFileBase64: async (filename) =>
|
||||
filename === 'word.mp3' ? 'd29yZA==' : filename === 'alt.ogg' ? 'YWx0' : '',
|
||||
probeAudioDurationSeconds: async (_buffer, filename) =>
|
||||
filename === 'word.mp3' ? 0.41 : filename === 'alt.ogg' ? 0.84 : null,
|
||||
logWarn: () => undefined,
|
||||
});
|
||||
|
||||
assert.equal(leadInSeconds, 1.25);
|
||||
});
|
||||
|
||||
test('resolveAnimatedImageLeadInSeconds falls back to zero when sync is disabled', async () => {
|
||||
const leadInSeconds = await resolveAnimatedImageLeadInSeconds({
|
||||
config: {
|
||||
fields: {
|
||||
audio: 'ExpressionAudio',
|
||||
},
|
||||
media: {
|
||||
imageType: 'avif',
|
||||
syncAnimatedImageToWordAudio: false,
|
||||
},
|
||||
},
|
||||
noteInfo: {
|
||||
noteId: 42,
|
||||
fields: {
|
||||
ExpressionAudio: {
|
||||
value: '[sound:word.mp3]',
|
||||
},
|
||||
},
|
||||
},
|
||||
resolveConfiguredFieldName: () => 'ExpressionAudio',
|
||||
retrieveMediaFileBase64: async () => {
|
||||
throw new Error('should not be called');
|
||||
},
|
||||
probeAudioDurationSeconds: async () => {
|
||||
throw new Error('should not be called');
|
||||
},
|
||||
logWarn: () => undefined,
|
||||
});
|
||||
|
||||
assert.equal(leadInSeconds, 0);
|
||||
});
|
||||
133
src/anki-integration/animated-image-sync.ts
Normal file
133
src/anki-integration/animated-image-sync.ts
Normal file
@@ -0,0 +1,133 @@
|
||||
import { execFile as nodeExecFile } from 'node:child_process';
|
||||
import * as fs from 'node:fs';
|
||||
import * as os from 'node:os';
|
||||
import * as path from 'node:path';
|
||||
|
||||
import { DEFAULT_ANKI_CONNECT_CONFIG } from '../config';
|
||||
import type { AnkiConnectConfig } from '../types';
|
||||
|
||||
type NoteInfoLike = {
|
||||
noteId: number;
|
||||
fields: Record<string, { value: string }>;
|
||||
};
|
||||
|
||||
interface ResolveAnimatedImageLeadInSecondsArgs<TNoteInfo extends NoteInfoLike> {
|
||||
config: Pick<AnkiConnectConfig, 'fields' | 'media'>;
|
||||
noteInfo: TNoteInfo;
|
||||
resolveConfiguredFieldName: (
|
||||
noteInfo: TNoteInfo,
|
||||
...preferredNames: (string | undefined)[]
|
||||
) => string | null;
|
||||
retrieveMediaFileBase64: (filename: string) => Promise<string>;
|
||||
probeAudioDurationSeconds?: (buffer: Buffer, filename: string) => Promise<number | null>;
|
||||
logWarn?: (message: string, ...args: unknown[]) => void;
|
||||
}
|
||||
|
||||
interface ProbeAudioDurationDeps {
|
||||
execFile?: typeof nodeExecFile;
|
||||
mkdtempSync?: typeof fs.mkdtempSync;
|
||||
writeFileSync?: typeof fs.writeFileSync;
|
||||
rmSync?: typeof fs.rmSync;
|
||||
}
|
||||
|
||||
export function extractSoundFilenames(value: string): string[] {
|
||||
const matches = value.matchAll(/\[sound:([^\]]+)\]/gi);
|
||||
return Array.from(matches, (match) => match[1]?.trim() || '').filter((value) => value.length > 0);
|
||||
}
|
||||
|
||||
function shouldSyncAnimatedImageToWordAudio(config: Pick<AnkiConnectConfig, 'media'>): boolean {
|
||||
return (
|
||||
config.media?.imageType === 'avif' && config.media?.syncAnimatedImageToWordAudio !== false
|
||||
);
|
||||
}
|
||||
|
||||
export async function probeAudioDurationSeconds(
|
||||
buffer: Buffer,
|
||||
filename: string,
|
||||
deps: ProbeAudioDurationDeps = {},
|
||||
): Promise<number | null> {
|
||||
const execFile = deps.execFile ?? nodeExecFile;
|
||||
const mkdtempSync = deps.mkdtempSync ?? fs.mkdtempSync;
|
||||
const writeFileSync = deps.writeFileSync ?? fs.writeFileSync;
|
||||
const rmSync = deps.rmSync ?? fs.rmSync;
|
||||
|
||||
const tempDir = mkdtempSync(path.join(os.tmpdir(), 'subminer-audio-probe-'));
|
||||
const ext = path.extname(filename) || '.bin';
|
||||
const tempPath = path.join(tempDir, `probe${ext}`);
|
||||
writeFileSync(tempPath, buffer);
|
||||
|
||||
return new Promise((resolve) => {
|
||||
execFile(
|
||||
'ffprobe',
|
||||
[
|
||||
'-v',
|
||||
'error',
|
||||
'-show_entries',
|
||||
'format=duration',
|
||||
'-of',
|
||||
'default=noprint_wrappers=1:nokey=1',
|
||||
tempPath,
|
||||
],
|
||||
(error, stdout) => {
|
||||
try {
|
||||
if (error) {
|
||||
resolve(null);
|
||||
return;
|
||||
}
|
||||
|
||||
const durationSeconds = Number.parseFloat((stdout || '').trim());
|
||||
resolve(Number.isFinite(durationSeconds) && durationSeconds > 0 ? durationSeconds : null);
|
||||
} finally {
|
||||
rmSync(tempDir, { recursive: true, force: true });
|
||||
}
|
||||
},
|
||||
);
|
||||
});
|
||||
}
|
||||
|
||||
export async function resolveAnimatedImageLeadInSeconds<TNoteInfo extends NoteInfoLike>({
|
||||
config,
|
||||
noteInfo,
|
||||
resolveConfiguredFieldName,
|
||||
retrieveMediaFileBase64,
|
||||
probeAudioDurationSeconds: probeDuration = probeAudioDurationSeconds,
|
||||
logWarn,
|
||||
}: ResolveAnimatedImageLeadInSecondsArgs<TNoteInfo>): Promise<number> {
|
||||
if (!shouldSyncAnimatedImageToWordAudio(config)) {
|
||||
return 0;
|
||||
}
|
||||
|
||||
const wordAudioFieldName = resolveConfiguredFieldName(
|
||||
noteInfo,
|
||||
config.fields?.audio,
|
||||
DEFAULT_ANKI_CONNECT_CONFIG.fields.audio,
|
||||
);
|
||||
if (!wordAudioFieldName) {
|
||||
return 0;
|
||||
}
|
||||
|
||||
const wordAudioValue = noteInfo.fields[wordAudioFieldName]?.value || '';
|
||||
const filenames = extractSoundFilenames(wordAudioValue);
|
||||
if (filenames.length === 0) {
|
||||
return 0;
|
||||
}
|
||||
|
||||
let totalLeadInSeconds = 0;
|
||||
for (const filename of filenames) {
|
||||
const encoded = await retrieveMediaFileBase64(filename);
|
||||
if (!encoded) {
|
||||
logWarn?.('Animated image sync skipped: failed to retrieve word audio', filename);
|
||||
return 0;
|
||||
}
|
||||
|
||||
const durationSeconds = await probeDuration(Buffer.from(encoded, 'base64'), filename);
|
||||
if (!(typeof durationSeconds === 'number' && Number.isFinite(durationSeconds))) {
|
||||
logWarn?.('Animated image sync skipped: failed to probe word audio duration', filename);
|
||||
return 0;
|
||||
}
|
||||
|
||||
totalLeadInSeconds += durationSeconds;
|
||||
}
|
||||
|
||||
return totalLeadInSeconds;
|
||||
}
|
||||
@@ -1,4 +1,6 @@
|
||||
import assert from 'node:assert/strict';
|
||||
import http from 'node:http';
|
||||
import { once } from 'node:events';
|
||||
import test from 'node:test';
|
||||
import { AnkiConnectProxyServer } from './anki-connect-proxy';
|
||||
|
||||
@@ -17,11 +19,15 @@ async function waitForCondition(
|
||||
|
||||
test('proxy enqueues addNote result for enrichment', async () => {
|
||||
const processed: number[] = [];
|
||||
const recordedCards: number[] = [];
|
||||
const proxy = new AnkiConnectProxyServer({
|
||||
shouldAutoUpdateNewCards: () => true,
|
||||
processNewCard: async (noteId) => {
|
||||
processed.push(noteId);
|
||||
},
|
||||
recordCardsAdded: (count) => {
|
||||
recordedCards.push(count);
|
||||
},
|
||||
logInfo: () => undefined,
|
||||
logWarn: () => undefined,
|
||||
logError: () => undefined,
|
||||
@@ -38,6 +44,7 @@ test('proxy enqueues addNote result for enrichment', async () => {
|
||||
|
||||
await waitForCondition(() => processed.length === 1);
|
||||
assert.deepEqual(processed, [42]);
|
||||
assert.deepEqual(recordedCards, [1]);
|
||||
});
|
||||
|
||||
test('proxy enqueues addNote bare numeric response for enrichment', async () => {
|
||||
@@ -64,12 +71,16 @@ test('proxy enqueues addNote bare numeric response for enrichment', async () =>
|
||||
|
||||
test('proxy de-duplicates addNotes IDs within the same response', async () => {
|
||||
const processed: number[] = [];
|
||||
const recordedCards: number[] = [];
|
||||
const proxy = new AnkiConnectProxyServer({
|
||||
shouldAutoUpdateNewCards: () => true,
|
||||
processNewCard: async (noteId) => {
|
||||
processed.push(noteId);
|
||||
await new Promise((resolve) => setTimeout(resolve, 5));
|
||||
},
|
||||
recordCardsAdded: (count) => {
|
||||
recordedCards.push(count);
|
||||
},
|
||||
logInfo: () => undefined,
|
||||
logWarn: () => undefined,
|
||||
logError: () => undefined,
|
||||
@@ -86,6 +97,7 @@ test('proxy de-duplicates addNotes IDs within the same response', async () => {
|
||||
|
||||
await waitForCondition(() => processed.length === 2);
|
||||
assert.deepEqual(processed, [101, 102]);
|
||||
assert.deepEqual(recordedCards, [2]);
|
||||
});
|
||||
|
||||
test('proxy enqueues note IDs from multi action addNote/addNotes results', async () => {
|
||||
@@ -277,12 +289,16 @@ test('proxy does not fallback-enqueue latest note for multi requests without add
|
||||
|
||||
test('proxy fallback-enqueues latest note for addNote responses without note IDs and escapes deck quotes', async () => {
|
||||
const processed: number[] = [];
|
||||
const recordedCards: number[] = [];
|
||||
const findNotesQueries: string[] = [];
|
||||
const proxy = new AnkiConnectProxyServer({
|
||||
shouldAutoUpdateNewCards: () => true,
|
||||
processNewCard: async (noteId) => {
|
||||
processed.push(noteId);
|
||||
},
|
||||
recordCardsAdded: (count) => {
|
||||
recordedCards.push(count);
|
||||
},
|
||||
getDeck: () => 'My "Japanese" Deck',
|
||||
findNotes: async (query) => {
|
||||
findNotesQueries.push(query);
|
||||
@@ -305,6 +321,84 @@ test('proxy fallback-enqueues latest note for addNote responses without note IDs
|
||||
await waitForCondition(() => processed.length === 1);
|
||||
assert.deepEqual(findNotesQueries, ['"deck:My \\"Japanese\\" Deck" added:1']);
|
||||
assert.deepEqual(processed, [501]);
|
||||
assert.deepEqual(recordedCards, [1]);
|
||||
});
|
||||
|
||||
test('proxy returns addNote response without waiting for background enrichment', async () => {
|
||||
const processed: number[] = [];
|
||||
let releaseProcessing: (() => void) | undefined;
|
||||
const processingGate = new Promise<void>((resolve) => {
|
||||
releaseProcessing = resolve;
|
||||
});
|
||||
|
||||
const upstream = http.createServer((req, res) => {
|
||||
assert.equal(req.method, 'POST');
|
||||
res.statusCode = 200;
|
||||
res.setHeader('content-type', 'application/json');
|
||||
res.end(JSON.stringify({ result: 42, error: null }));
|
||||
});
|
||||
upstream.listen(0, '127.0.0.1');
|
||||
await once(upstream, 'listening');
|
||||
const upstreamAddress = upstream.address();
|
||||
assert.ok(upstreamAddress && typeof upstreamAddress === 'object');
|
||||
const upstreamPort = upstreamAddress.port;
|
||||
|
||||
const proxy = new AnkiConnectProxyServer({
|
||||
shouldAutoUpdateNewCards: () => true,
|
||||
processNewCard: async (noteId) => {
|
||||
processed.push(noteId);
|
||||
await processingGate;
|
||||
},
|
||||
logInfo: () => undefined,
|
||||
logWarn: () => undefined,
|
||||
logError: () => undefined,
|
||||
});
|
||||
|
||||
try {
|
||||
proxy.start({
|
||||
host: '127.0.0.1',
|
||||
port: 0,
|
||||
upstreamUrl: `http://127.0.0.1:${upstreamPort}`,
|
||||
});
|
||||
|
||||
const proxyServer = (
|
||||
proxy as unknown as {
|
||||
server: http.Server | null;
|
||||
}
|
||||
).server;
|
||||
assert.ok(proxyServer);
|
||||
if (!proxyServer.listening) {
|
||||
await once(proxyServer, 'listening');
|
||||
}
|
||||
const proxyAddress = proxyServer.address();
|
||||
assert.ok(proxyAddress && typeof proxyAddress === 'object');
|
||||
const proxyPort = proxyAddress.port;
|
||||
|
||||
const response = await Promise.race([
|
||||
fetch(`http://127.0.0.1:${proxyPort}`, {
|
||||
method: 'POST',
|
||||
headers: {
|
||||
'content-type': 'application/json',
|
||||
},
|
||||
body: JSON.stringify({ action: 'addNote', version: 6, params: {} }),
|
||||
}),
|
||||
new Promise<never>((_, reject) => {
|
||||
setTimeout(() => reject(new Error('Timed out waiting for proxy response')), 500);
|
||||
}),
|
||||
]);
|
||||
|
||||
assert.equal(response.status, 200);
|
||||
assert.deepEqual(await response.json(), { result: 42, error: null });
|
||||
await waitForCondition(() => processed.length === 1);
|
||||
assert.deepEqual(processed, [42]);
|
||||
} finally {
|
||||
if (releaseProcessing) {
|
||||
releaseProcessing();
|
||||
}
|
||||
proxy.stop();
|
||||
upstream.close();
|
||||
await once(upstream, 'close');
|
||||
}
|
||||
});
|
||||
|
||||
test('proxy detects self-referential loop configuration', () => {
|
||||
|
||||
@@ -15,6 +15,7 @@ interface AnkiConnectEnvelope {
|
||||
export interface AnkiConnectProxyServerDeps {
|
||||
shouldAutoUpdateNewCards: () => boolean;
|
||||
processNewCard: (noteId: number) => Promise<void>;
|
||||
recordCardsAdded?: (count: number, noteIds: number[]) => void;
|
||||
getDeck?: () => string | undefined;
|
||||
findNotes?: (
|
||||
query: string,
|
||||
@@ -332,12 +333,14 @@ export class AnkiConnectProxyServer {
|
||||
|
||||
private enqueueNotes(noteIds: number[]): void {
|
||||
let enqueuedCount = 0;
|
||||
const acceptedIds: number[] = [];
|
||||
for (const noteId of noteIds) {
|
||||
if (this.pendingNoteIdSet.has(noteId) || this.inFlightNoteIds.has(noteId)) {
|
||||
continue;
|
||||
}
|
||||
this.pendingNoteIds.push(noteId);
|
||||
this.pendingNoteIdSet.add(noteId);
|
||||
acceptedIds.push(noteId);
|
||||
enqueuedCount += 1;
|
||||
}
|
||||
|
||||
@@ -345,6 +348,7 @@ export class AnkiConnectProxyServer {
|
||||
return;
|
||||
}
|
||||
|
||||
this.deps.recordCardsAdded?.(enqueuedCount, acceptedIds);
|
||||
this.deps.logInfo(`[anki-proxy] Enqueued ${enqueuedCount} note(s) for enrichment`);
|
||||
this.processQueue();
|
||||
}
|
||||
|
||||
285
src/anki-integration/card-creation.test.ts
Normal file
285
src/anki-integration/card-creation.test.ts
Normal file
@@ -0,0 +1,285 @@
|
||||
import assert from 'node:assert/strict';
|
||||
import test from 'node:test';
|
||||
|
||||
import { CardCreationService } from './card-creation';
|
||||
import type { AnkiConnectConfig } from '../types';
|
||||
|
||||
test('CardCreationService counts locally created sentence cards', async () => {
|
||||
const minedCards: Array<{ count: number; noteIds?: number[] }> = [];
|
||||
const service = new CardCreationService({
|
||||
getConfig: () =>
|
||||
({
|
||||
deck: 'Mining',
|
||||
fields: {
|
||||
sentence: 'Sentence',
|
||||
audio: 'SentenceAudio',
|
||||
},
|
||||
media: {
|
||||
generateAudio: false,
|
||||
generateImage: false,
|
||||
},
|
||||
behavior: {},
|
||||
ai: false,
|
||||
}) as AnkiConnectConfig,
|
||||
getAiConfig: () => ({}),
|
||||
getTimingTracker: () => ({}) as never,
|
||||
getMpvClient: () =>
|
||||
({
|
||||
currentVideoPath: '/video.mp4',
|
||||
currentSubText: '字幕',
|
||||
currentSubStart: 1,
|
||||
currentSubEnd: 2,
|
||||
currentTimePos: 1.5,
|
||||
currentAudioStreamIndex: 0,
|
||||
}) as never,
|
||||
client: {
|
||||
addNote: async () => 42,
|
||||
addTags: async () => undefined,
|
||||
notesInfo: async () => [],
|
||||
updateNoteFields: async () => undefined,
|
||||
storeMediaFile: async () => undefined,
|
||||
findNotes: async () => [],
|
||||
retrieveMediaFile: async () => '',
|
||||
},
|
||||
mediaGenerator: {
|
||||
generateAudio: async () => null,
|
||||
generateScreenshot: async () => null,
|
||||
generateAnimatedImage: async () => null,
|
||||
},
|
||||
showOsdNotification: () => undefined,
|
||||
showUpdateResult: () => undefined,
|
||||
showStatusNotification: () => undefined,
|
||||
showNotification: async () => undefined,
|
||||
beginUpdateProgress: () => undefined,
|
||||
endUpdateProgress: () => undefined,
|
||||
withUpdateProgress: async (_message, action) => action(),
|
||||
resolveConfiguredFieldName: () => null,
|
||||
resolveNoteFieldName: () => null,
|
||||
getAnimatedImageLeadInSeconds: async () => 0,
|
||||
extractFields: () => ({}),
|
||||
processSentence: (sentence) => sentence,
|
||||
setCardTypeFields: () => undefined,
|
||||
mergeFieldValue: (_existing, newValue) => newValue,
|
||||
formatMiscInfoPattern: () => '',
|
||||
getEffectiveSentenceCardConfig: () => ({
|
||||
model: 'Sentence',
|
||||
sentenceField: 'Sentence',
|
||||
audioField: 'SentenceAudio',
|
||||
lapisEnabled: false,
|
||||
kikuEnabled: false,
|
||||
kikuFieldGrouping: 'disabled',
|
||||
kikuDeleteDuplicateInAuto: false,
|
||||
}),
|
||||
getFallbackDurationSeconds: () => 10,
|
||||
appendKnownWordsFromNoteInfo: () => undefined,
|
||||
isUpdateInProgress: () => false,
|
||||
setUpdateInProgress: () => undefined,
|
||||
trackLastAddedNoteId: () => undefined,
|
||||
recordCardsMinedCallback: (count, noteIds) => {
|
||||
minedCards.push({ count, noteIds });
|
||||
},
|
||||
});
|
||||
|
||||
const created = await service.createSentenceCard('テスト', 0, 1);
|
||||
|
||||
assert.equal(created, true);
|
||||
assert.deepEqual(minedCards, [{ count: 1, noteIds: [42] }]);
|
||||
});
|
||||
|
||||
test('CardCreationService keeps updating after trackLastAddedNoteId throws', async () => {
|
||||
const calls = {
|
||||
notesInfo: 0,
|
||||
updateNoteFields: 0,
|
||||
};
|
||||
const service = new CardCreationService({
|
||||
getConfig: () =>
|
||||
({
|
||||
deck: 'Mining',
|
||||
fields: {
|
||||
sentence: 'Sentence',
|
||||
audio: 'SentenceAudio',
|
||||
},
|
||||
media: {
|
||||
generateAudio: false,
|
||||
generateImage: false,
|
||||
},
|
||||
behavior: {},
|
||||
ai: false,
|
||||
}) as AnkiConnectConfig,
|
||||
getAiConfig: () => ({}),
|
||||
getTimingTracker: () => ({}) as never,
|
||||
getMpvClient: () =>
|
||||
({
|
||||
currentVideoPath: '/video.mp4',
|
||||
currentSubText: '字幕',
|
||||
currentSubStart: 1,
|
||||
currentSubEnd: 2,
|
||||
currentTimePos: 1.5,
|
||||
currentAudioStreamIndex: 0,
|
||||
}) as never,
|
||||
client: {
|
||||
addNote: async () => 42,
|
||||
addTags: async () => undefined,
|
||||
notesInfo: async () => {
|
||||
calls.notesInfo += 1;
|
||||
return [
|
||||
{
|
||||
noteId: 42,
|
||||
fields: {
|
||||
Sentence: { value: 'existing' },
|
||||
},
|
||||
},
|
||||
];
|
||||
},
|
||||
updateNoteFields: async () => {
|
||||
calls.updateNoteFields += 1;
|
||||
},
|
||||
storeMediaFile: async () => undefined,
|
||||
findNotes: async () => [],
|
||||
retrieveMediaFile: async () => '',
|
||||
},
|
||||
mediaGenerator: {
|
||||
generateAudio: async () => null,
|
||||
generateScreenshot: async () => null,
|
||||
generateAnimatedImage: async () => null,
|
||||
},
|
||||
showOsdNotification: () => undefined,
|
||||
showUpdateResult: () => undefined,
|
||||
showStatusNotification: () => undefined,
|
||||
showNotification: async () => undefined,
|
||||
beginUpdateProgress: () => undefined,
|
||||
endUpdateProgress: () => undefined,
|
||||
withUpdateProgress: async (_message, action) => action(),
|
||||
resolveConfiguredFieldName: () => null,
|
||||
resolveNoteFieldName: () => null,
|
||||
getAnimatedImageLeadInSeconds: async () => 0,
|
||||
extractFields: () => ({}),
|
||||
processSentence: (sentence) => sentence,
|
||||
setCardTypeFields: (updatedFields) => {
|
||||
updatedFields.CardType = 'sentence';
|
||||
},
|
||||
mergeFieldValue: (_existing, newValue) => newValue,
|
||||
formatMiscInfoPattern: () => '',
|
||||
getEffectiveSentenceCardConfig: () => ({
|
||||
model: 'Sentence',
|
||||
sentenceField: 'Sentence',
|
||||
audioField: 'SentenceAudio',
|
||||
lapisEnabled: false,
|
||||
kikuEnabled: false,
|
||||
kikuFieldGrouping: 'disabled',
|
||||
kikuDeleteDuplicateInAuto: false,
|
||||
}),
|
||||
getFallbackDurationSeconds: () => 10,
|
||||
appendKnownWordsFromNoteInfo: () => undefined,
|
||||
isUpdateInProgress: () => false,
|
||||
setUpdateInProgress: () => undefined,
|
||||
trackLastAddedNoteId: () => {
|
||||
throw new Error('track failed');
|
||||
},
|
||||
});
|
||||
|
||||
const created = await service.createSentenceCard('テスト', 0, 1);
|
||||
|
||||
assert.equal(created, true);
|
||||
assert.equal(calls.notesInfo, 1);
|
||||
assert.equal(calls.updateNoteFields, 1);
|
||||
});
|
||||
|
||||
test('CardCreationService keeps updating after recordCardsMinedCallback throws', async () => {
|
||||
const calls = {
|
||||
notesInfo: 0,
|
||||
updateNoteFields: 0,
|
||||
};
|
||||
const service = new CardCreationService({
|
||||
getConfig: () =>
|
||||
({
|
||||
deck: 'Mining',
|
||||
fields: {
|
||||
sentence: 'Sentence',
|
||||
audio: 'SentenceAudio',
|
||||
},
|
||||
media: {
|
||||
generateAudio: false,
|
||||
generateImage: false,
|
||||
},
|
||||
behavior: {},
|
||||
ai: false,
|
||||
}) as AnkiConnectConfig,
|
||||
getAiConfig: () => ({}),
|
||||
getTimingTracker: () => ({}) as never,
|
||||
getMpvClient: () =>
|
||||
({
|
||||
currentVideoPath: '/video.mp4',
|
||||
currentSubText: '字幕',
|
||||
currentSubStart: 1,
|
||||
currentSubEnd: 2,
|
||||
currentTimePos: 1.5,
|
||||
currentAudioStreamIndex: 0,
|
||||
}) as never,
|
||||
client: {
|
||||
addNote: async () => 42,
|
||||
addTags: async () => undefined,
|
||||
notesInfo: async () => {
|
||||
calls.notesInfo += 1;
|
||||
return [
|
||||
{
|
||||
noteId: 42,
|
||||
fields: {
|
||||
Sentence: { value: 'existing' },
|
||||
},
|
||||
},
|
||||
];
|
||||
},
|
||||
updateNoteFields: async () => {
|
||||
calls.updateNoteFields += 1;
|
||||
},
|
||||
storeMediaFile: async () => undefined,
|
||||
findNotes: async () => [],
|
||||
retrieveMediaFile: async () => '',
|
||||
},
|
||||
mediaGenerator: {
|
||||
generateAudio: async () => null,
|
||||
generateScreenshot: async () => null,
|
||||
generateAnimatedImage: async () => null,
|
||||
},
|
||||
showOsdNotification: () => undefined,
|
||||
showUpdateResult: () => undefined,
|
||||
showStatusNotification: () => undefined,
|
||||
showNotification: async () => undefined,
|
||||
beginUpdateProgress: () => undefined,
|
||||
endUpdateProgress: () => undefined,
|
||||
withUpdateProgress: async (_message, action) => action(),
|
||||
resolveConfiguredFieldName: () => null,
|
||||
resolveNoteFieldName: () => null,
|
||||
getAnimatedImageLeadInSeconds: async () => 0,
|
||||
extractFields: () => ({}),
|
||||
processSentence: (sentence) => sentence,
|
||||
setCardTypeFields: (updatedFields) => {
|
||||
updatedFields.CardType = 'sentence';
|
||||
},
|
||||
mergeFieldValue: (_existing, newValue) => newValue,
|
||||
formatMiscInfoPattern: () => '',
|
||||
getEffectiveSentenceCardConfig: () => ({
|
||||
model: 'Sentence',
|
||||
sentenceField: 'Sentence',
|
||||
audioField: 'SentenceAudio',
|
||||
lapisEnabled: false,
|
||||
kikuEnabled: false,
|
||||
kikuFieldGrouping: 'disabled',
|
||||
kikuDeleteDuplicateInAuto: false,
|
||||
}),
|
||||
getFallbackDurationSeconds: () => 10,
|
||||
appendKnownWordsFromNoteInfo: () => undefined,
|
||||
isUpdateInProgress: () => false,
|
||||
setUpdateInProgress: () => undefined,
|
||||
recordCardsMinedCallback: () => {
|
||||
throw new Error('record failed');
|
||||
},
|
||||
});
|
||||
|
||||
const created = await service.createSentenceCard('テスト', 0, 1);
|
||||
|
||||
assert.equal(created, true);
|
||||
assert.equal(calls.notesInfo, 1);
|
||||
assert.equal(calls.updateNoteFields, 1);
|
||||
});
|
||||
@@ -1,4 +1,8 @@
|
||||
import { DEFAULT_ANKI_CONNECT_CONFIG } from '../config';
|
||||
import {
|
||||
getConfiguredWordFieldName,
|
||||
getPreferredWordValueFromExtractedFields,
|
||||
} from '../anki-field-config';
|
||||
import { AiConfig, AnkiConnectConfig } from '../types';
|
||||
import { createLogger } from '../logger';
|
||||
import { SubtitleTimingTracker } from '../subtitle-timing-tracker';
|
||||
@@ -26,6 +30,7 @@ interface CardCreationClient {
|
||||
updateNoteFields(noteId: number, fields: Record<string, string>): Promise<void>;
|
||||
storeMediaFile(filename: string, data: Buffer): Promise<void>;
|
||||
findNotes(query: string, options?: { maxRetries?: number }): Promise<number[]>;
|
||||
retrieveMediaFile(filename: string): Promise<string>;
|
||||
}
|
||||
|
||||
interface CardCreationMediaGenerator {
|
||||
@@ -56,6 +61,7 @@ interface CardCreationMediaGenerator {
|
||||
maxWidth?: number;
|
||||
maxHeight?: number;
|
||||
crf?: number;
|
||||
leadingStillDuration?: number;
|
||||
},
|
||||
): Promise<Buffer | null>;
|
||||
}
|
||||
@@ -69,6 +75,7 @@ interface CardCreationDeps {
|
||||
client: CardCreationClient;
|
||||
mediaGenerator: CardCreationMediaGenerator;
|
||||
showOsdNotification: (text: string) => void;
|
||||
showUpdateResult: (message: string, success: boolean) => void;
|
||||
showStatusNotification: (message: string) => void;
|
||||
showNotification: (noteId: number, label: string | number, errorSuffix?: string) => Promise<void>;
|
||||
beginUpdateProgress: (initialMessage: string) => void;
|
||||
@@ -79,6 +86,7 @@ interface CardCreationDeps {
|
||||
...preferredNames: (string | undefined)[]
|
||||
) => string | null;
|
||||
resolveNoteFieldName: (noteInfo: CardCreationNoteInfo, preferredName?: string) => string | null;
|
||||
getAnimatedImageLeadInSeconds: (noteInfo: CardCreationNoteInfo) => Promise<number>;
|
||||
extractFields: (fields: Record<string, { value: string }>) => Record<string, string>;
|
||||
processSentence: (mpvSentence: string, noteFields: Record<string, string>) => string;
|
||||
setCardTypeFields: (
|
||||
@@ -102,6 +110,7 @@ interface CardCreationDeps {
|
||||
isUpdateInProgress: () => boolean;
|
||||
setUpdateInProgress: (value: boolean) => void;
|
||||
trackLastAddedNoteId?: (noteId: number) => void;
|
||||
recordCardsMinedCallback?: (count: number, noteIds?: number[]) => void;
|
||||
}
|
||||
|
||||
export class CardCreationService {
|
||||
@@ -201,7 +210,10 @@ export class CardCreationService {
|
||||
|
||||
const noteInfo = notesInfoResult[0]!;
|
||||
const fields = this.deps.extractFields(noteInfo.fields);
|
||||
const expressionText = fields.expression || fields.word || '';
|
||||
const expressionText = getPreferredWordValueFromExtractedFields(
|
||||
fields,
|
||||
this.deps.getConfig(),
|
||||
);
|
||||
const sentenceAudioField = this.getResolvedSentenceAudioFieldName(noteInfo);
|
||||
const sentenceField = this.deps.getEffectiveSentenceCardConfig().sentenceField;
|
||||
|
||||
@@ -251,11 +263,13 @@ export class CardCreationService {
|
||||
|
||||
if (this.deps.getConfig().media?.generateImage) {
|
||||
try {
|
||||
const animatedLeadInSeconds = await this.deps.getAnimatedImageLeadInSeconds(noteInfo);
|
||||
const imageFilename = this.generateImageFilename();
|
||||
const imageBuffer = await this.generateImageBuffer(
|
||||
mpvClient.currentVideoPath,
|
||||
rangeStart,
|
||||
rangeEnd,
|
||||
animatedLeadInSeconds,
|
||||
);
|
||||
|
||||
if (imageBuffer) {
|
||||
@@ -368,7 +382,10 @@ export class CardCreationService {
|
||||
|
||||
const noteInfo = notesInfoResult[0]!;
|
||||
const fields = this.deps.extractFields(noteInfo.fields);
|
||||
const expressionText = fields.expression || fields.word || '';
|
||||
const expressionText = getPreferredWordValueFromExtractedFields(
|
||||
fields,
|
||||
this.deps.getConfig(),
|
||||
);
|
||||
|
||||
const updatedFields: Record<string, string> = {};
|
||||
const errors: string[] = [];
|
||||
@@ -404,11 +421,13 @@ export class CardCreationService {
|
||||
|
||||
if (this.deps.getConfig().media?.generateImage) {
|
||||
try {
|
||||
const animatedLeadInSeconds = await this.deps.getAnimatedImageLeadInSeconds(noteInfo);
|
||||
const imageFilename = this.generateImageFilename();
|
||||
const imageBuffer = await this.generateImageBuffer(
|
||||
mpvClient.currentVideoPath,
|
||||
startTime,
|
||||
endTime,
|
||||
animatedLeadInSeconds,
|
||||
);
|
||||
|
||||
const imageField = this.deps.getConfig().fields?.image;
|
||||
@@ -519,7 +538,7 @@ export class CardCreationService {
|
||||
|
||||
if (sentenceCardConfig.lapisEnabled || sentenceCardConfig.kikuEnabled) {
|
||||
fields.IsSentenceCard = 'x';
|
||||
fields.Expression = sentence;
|
||||
fields[getConfiguredWordFieldName(this.deps.getConfig())] = sentence;
|
||||
}
|
||||
|
||||
const deck = this.deps.getConfig().deck || 'Default';
|
||||
@@ -532,13 +551,24 @@ export class CardCreationService {
|
||||
this.getConfiguredAnkiTags(),
|
||||
);
|
||||
log.info('Created sentence card:', noteId);
|
||||
this.deps.trackLastAddedNoteId?.(noteId);
|
||||
} catch (error) {
|
||||
log.error('Failed to create sentence card:', (error as Error).message);
|
||||
this.deps.showOsdNotification(`Sentence card failed: ${(error as Error).message}`);
|
||||
this.deps.showUpdateResult(`Sentence card failed: ${(error as Error).message}`, false);
|
||||
return false;
|
||||
}
|
||||
|
||||
try {
|
||||
this.deps.trackLastAddedNoteId?.(noteId);
|
||||
} catch (error) {
|
||||
log.warn('Failed to track last added note:', (error as Error).message);
|
||||
}
|
||||
|
||||
try {
|
||||
this.deps.recordCardsMinedCallback?.(1, [noteId]);
|
||||
} catch (error) {
|
||||
log.warn('Failed to record mined card:', (error as Error).message);
|
||||
}
|
||||
|
||||
try {
|
||||
const noteInfoResult = await this.deps.client.notesInfo([noteId]);
|
||||
const noteInfos = noteInfoResult as CardCreationNoteInfo[];
|
||||
@@ -632,7 +662,7 @@ export class CardCreationService {
|
||||
});
|
||||
} catch (error) {
|
||||
log.error('Error creating sentence card:', (error as Error).message);
|
||||
this.deps.showOsdNotification(`Sentence card failed: ${(error as Error).message}`);
|
||||
this.deps.showUpdateResult(`Sentence card failed: ${(error as Error).message}`, false);
|
||||
return false;
|
||||
}
|
||||
}
|
||||
@@ -669,6 +699,7 @@ export class CardCreationService {
|
||||
videoPath: string,
|
||||
startTime: number,
|
||||
endTime: number,
|
||||
animatedLeadInSeconds = 0,
|
||||
): Promise<Buffer | null> {
|
||||
const mpvClient = this.deps.getMpvClient();
|
||||
if (!mpvClient) {
|
||||
@@ -697,6 +728,7 @@ export class CardCreationService {
|
||||
maxWidth: this.deps.getConfig().media?.animatedMaxWidth,
|
||||
maxHeight: this.deps.getConfig().media?.animatedMaxHeight,
|
||||
crf: this.deps.getConfig().media?.animatedCrf,
|
||||
leadingStillDuration: animatedLeadInSeconds,
|
||||
},
|
||||
);
|
||||
}
|
||||
|
||||
@@ -11,6 +11,7 @@ export interface DuplicateDetectionDeps {
|
||||
findNotes: (query: string, options?: { maxRetries?: number }) => Promise<unknown>;
|
||||
notesInfo: (noteIds: number[]) => Promise<unknown>;
|
||||
getDeck: () => string | null | undefined;
|
||||
getWordFieldCandidates?: () => string[];
|
||||
resolveFieldName: (noteInfo: NoteInfo, preferredName: string) => string | null;
|
||||
logInfo?: (message: string) => void;
|
||||
logDebug?: (message: string) => void;
|
||||
@@ -23,7 +24,12 @@ export async function findDuplicateNote(
|
||||
noteInfo: NoteInfo,
|
||||
deps: DuplicateDetectionDeps,
|
||||
): Promise<number | null> {
|
||||
const sourceCandidates = getDuplicateSourceCandidates(noteInfo, expression);
|
||||
const configuredWordFieldCandidates = deps.getWordFieldCandidates?.() ?? ['Expression', 'Word'];
|
||||
const sourceCandidates = getDuplicateSourceCandidates(
|
||||
noteInfo,
|
||||
expression,
|
||||
configuredWordFieldCandidates,
|
||||
);
|
||||
if (sourceCandidates.length === 0) return null;
|
||||
deps.logInfo?.(
|
||||
`[duplicate] start expr="${expression}" sourceCandidates=${sourceCandidates
|
||||
@@ -81,6 +87,7 @@ export async function findDuplicateNote(
|
||||
noteIds,
|
||||
excludeNoteId,
|
||||
sourceCandidates.map((candidate) => candidate.value),
|
||||
configuredWordFieldCandidates,
|
||||
deps,
|
||||
);
|
||||
} catch (error) {
|
||||
@@ -93,6 +100,7 @@ function findFirstExactDuplicateNoteId(
|
||||
candidateNoteIds: Iterable<number>,
|
||||
excludeNoteId: number,
|
||||
sourceValues: string[],
|
||||
candidateFieldNames: string[],
|
||||
deps: DuplicateDetectionDeps,
|
||||
): Promise<number | null> {
|
||||
const candidates = Array.from(candidateNoteIds).filter((id) => id !== excludeNoteId);
|
||||
@@ -116,7 +124,6 @@ function findFirstExactDuplicateNoteId(
|
||||
const notesInfoResult = (await deps.notesInfo(chunk)) as unknown[];
|
||||
const notesInfo = notesInfoResult as NoteInfo[];
|
||||
for (const noteInfo of notesInfo) {
|
||||
const candidateFieldNames = ['word', 'expression'];
|
||||
for (const candidateFieldName of candidateFieldNames) {
|
||||
const resolvedField = deps.resolveFieldName(noteInfo, candidateFieldName);
|
||||
if (!resolvedField) continue;
|
||||
@@ -150,13 +157,15 @@ function getDuplicateCandidateFieldNames(fieldName: string): string[] {
|
||||
function getDuplicateSourceCandidates(
|
||||
noteInfo: NoteInfo,
|
||||
fallbackExpression: string,
|
||||
configuredFieldNames: string[],
|
||||
): Array<{ fieldName: string; value: string }> {
|
||||
const candidates: Array<{ fieldName: string; value: string }> = [];
|
||||
const dedupeKey = new Set<string>();
|
||||
const configuredFieldNameSet = new Set(configuredFieldNames.map((name) => name.toLowerCase()));
|
||||
|
||||
for (const fieldName of Object.keys(noteInfo.fields)) {
|
||||
const lower = fieldName.toLowerCase();
|
||||
if (lower !== 'word' && lower !== 'expression') continue;
|
||||
if (!configuredFieldNameSet.has(lower)) continue;
|
||||
const value = noteInfo.fields[fieldName]?.value?.trim() ?? '';
|
||||
if (!value) continue;
|
||||
const key = `${lower}:${normalizeDuplicateValue(value)}`;
|
||||
@@ -167,9 +176,10 @@ function getDuplicateSourceCandidates(
|
||||
|
||||
const trimmedFallback = fallbackExpression.trim();
|
||||
if (trimmedFallback.length > 0) {
|
||||
const fallbackKey = `expression:${normalizeDuplicateValue(trimmedFallback)}`;
|
||||
const fallbackFieldName = configuredFieldNames[0]?.toLowerCase() || 'expression';
|
||||
const fallbackKey = `${fallbackFieldName}:${normalizeDuplicateValue(trimmedFallback)}`;
|
||||
if (!dedupeKey.has(fallbackKey)) {
|
||||
candidates.push({ fieldName: 'expression', value: trimmedFallback });
|
||||
candidates.push({ fieldName: configuredFieldNames[0] || 'Expression', value: trimmedFallback });
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@@ -1,4 +1,5 @@
|
||||
import { AnkiConnectConfig } from '../types';
|
||||
import { getConfiguredWordFieldName } from '../anki-field-config';
|
||||
|
||||
interface FieldGroupingMergeMedia {
|
||||
audioField?: string;
|
||||
@@ -27,7 +28,7 @@ interface FieldGroupingMergeDeps {
|
||||
) => string | null;
|
||||
extractFields: (fields: Record<string, { value: string }>) => Record<string, string>;
|
||||
processSentence: (mpvSentence: string, noteFields: Record<string, string>) => string;
|
||||
generateMediaForMerge: () => Promise<FieldGroupingMergeMedia>;
|
||||
generateMediaForMerge: (noteInfo: FieldGroupingMergeNoteInfo) => Promise<FieldGroupingMergeMedia>;
|
||||
warnFieldParseOnce: (fieldName: string, reason: string, detail?: string) => void;
|
||||
}
|
||||
|
||||
@@ -77,6 +78,7 @@ export class FieldGroupingMergeCollaborator {
|
||||
includeGeneratedMedia: boolean,
|
||||
): Promise<Record<string, string>> {
|
||||
const config = this.deps.getConfig();
|
||||
const configuredWordField = getConfiguredWordFieldName(config);
|
||||
const groupableFields = this.getGroupableFieldNames();
|
||||
const keepFieldNames = Object.keys(keepNoteInfo.fields);
|
||||
const sourceFields: Record<string, string> = {};
|
||||
@@ -98,11 +100,17 @@ export class FieldGroupingMergeCollaborator {
|
||||
if (!sourceFields['Sentence'] && sourceFields['SentenceFurigana']) {
|
||||
sourceFields['Sentence'] = sourceFields['SentenceFurigana'];
|
||||
}
|
||||
if (!sourceFields['Expression'] && sourceFields['Word']) {
|
||||
sourceFields['Expression'] = sourceFields['Word'];
|
||||
if (!sourceFields[configuredWordField] && sourceFields['Expression']) {
|
||||
sourceFields[configuredWordField] = sourceFields['Expression'];
|
||||
}
|
||||
if (!sourceFields['Word'] && sourceFields['Expression']) {
|
||||
sourceFields['Word'] = sourceFields['Expression'];
|
||||
if (!sourceFields[configuredWordField] && sourceFields['Word']) {
|
||||
sourceFields[configuredWordField] = sourceFields['Word'];
|
||||
}
|
||||
if (!sourceFields['Expression'] && sourceFields[configuredWordField]) {
|
||||
sourceFields['Expression'] = sourceFields[configuredWordField];
|
||||
}
|
||||
if (!sourceFields['Word'] && sourceFields[configuredWordField]) {
|
||||
sourceFields['Word'] = sourceFields[configuredWordField];
|
||||
}
|
||||
if (!sourceFields['SentenceAudio'] && sourceFields['ExpressionAudio']) {
|
||||
sourceFields['SentenceAudio'] = sourceFields['ExpressionAudio'];
|
||||
@@ -124,7 +132,7 @@ export class FieldGroupingMergeCollaborator {
|
||||
}
|
||||
|
||||
if (includeGeneratedMedia) {
|
||||
const media = await this.deps.generateMediaForMerge();
|
||||
const media = await this.deps.generateMediaForMerge(keepNoteInfo);
|
||||
if (media.audioField && media.audioValue && !sourceFields[media.audioField]) {
|
||||
sourceFields[media.audioField] = media.audioValue;
|
||||
}
|
||||
@@ -148,6 +156,7 @@ export class FieldGroupingMergeCollaborator {
|
||||
const keepFieldNormalized = keepFieldName.toLowerCase();
|
||||
if (
|
||||
keepFieldNormalized === 'expression' ||
|
||||
keepFieldNormalized === configuredWordField.toLowerCase() ||
|
||||
keepFieldNormalized === 'expressionfurigana' ||
|
||||
keepFieldNormalized === 'expressionreading' ||
|
||||
keepFieldNormalized === 'expressionaudio'
|
||||
|
||||
@@ -24,6 +24,7 @@ function createWorkflowHarness() {
|
||||
const updates: Array<{ noteId: number; fields: Record<string, string> }> = [];
|
||||
const deleted: number[][] = [];
|
||||
const statuses: string[] = [];
|
||||
const rememberedMerges: Array<{ deletedNoteId: number; keptNoteId: number }> = [];
|
||||
const mergeCalls: Array<{
|
||||
keepNoteId: number;
|
||||
deleteNoteId: number;
|
||||
@@ -99,6 +100,9 @@ function createWorkflowHarness() {
|
||||
hasFieldValue: (_noteInfo: NoteInfo, _field?: string) => false,
|
||||
addConfiguredTagsToNote: async () => undefined,
|
||||
removeTrackedNoteId: () => undefined,
|
||||
rememberMergedNoteIds: (deletedNoteId: number, keptNoteId: number) => {
|
||||
rememberedMerges.push({ deletedNoteId, keptNoteId });
|
||||
},
|
||||
showStatusNotification: (message: string) => {
|
||||
statuses.push(message);
|
||||
},
|
||||
@@ -113,6 +117,7 @@ function createWorkflowHarness() {
|
||||
workflow: new FieldGroupingWorkflow(deps),
|
||||
updates,
|
||||
deleted,
|
||||
rememberedMerges,
|
||||
statuses,
|
||||
mergeCalls,
|
||||
setManualChoice: (choice: typeof manualChoice) => {
|
||||
@@ -136,6 +141,7 @@ test('FieldGroupingWorkflow auto merge updates keep note and deletes duplicate b
|
||||
assert.equal(harness.updates.length, 1);
|
||||
assert.equal(harness.updates[0]?.noteId, 1);
|
||||
assert.deepEqual(harness.deleted, [[2]]);
|
||||
assert.deepEqual(harness.rememberedMerges, [{ deletedNoteId: 2, keptNoteId: 1 }]);
|
||||
assert.equal(harness.statuses.length, 1);
|
||||
});
|
||||
|
||||
|
||||
@@ -1,4 +1,5 @@
|
||||
import { KikuDuplicateCardInfo, KikuFieldGroupingChoice } from '../types';
|
||||
import { getPreferredWordValueFromExtractedFields } from '../anki-field-config';
|
||||
|
||||
export interface FieldGroupingWorkflowNoteInfo {
|
||||
noteId: number;
|
||||
@@ -13,6 +14,7 @@ export interface FieldGroupingWorkflowDeps {
|
||||
};
|
||||
getConfig: () => {
|
||||
fields?: {
|
||||
word?: string;
|
||||
audio?: string;
|
||||
image?: string;
|
||||
};
|
||||
@@ -48,6 +50,7 @@ export interface FieldGroupingWorkflowDeps {
|
||||
hasFieldValue: (noteInfo: FieldGroupingWorkflowNoteInfo, preferredFieldName?: string) => boolean;
|
||||
addConfiguredTagsToNote: (noteId: number) => Promise<void>;
|
||||
removeTrackedNoteId: (noteId: number) => void;
|
||||
rememberMergedNoteIds: (deletedNoteId: number, keptNoteId: number) => void;
|
||||
showStatusNotification: (message: string) => void;
|
||||
showNotification: (noteId: number, label: string | number) => Promise<void>;
|
||||
showOsdNotification: (message: string) => void;
|
||||
@@ -156,6 +159,7 @@ export class FieldGroupingWorkflow {
|
||||
if (deleteDuplicate) {
|
||||
await this.deps.client.deleteNotes([deleteNoteId]);
|
||||
this.deps.removeTrackedNoteId(deleteNoteId);
|
||||
this.deps.rememberMergedNoteIds(deleteNoteId, keepNoteId);
|
||||
}
|
||||
|
||||
this.deps.logInfo('Merged duplicate card:', expression, 'into note:', keepNoteId);
|
||||
@@ -176,7 +180,8 @@ export class FieldGroupingWorkflow {
|
||||
const fields = this.deps.extractFields(noteInfo.fields);
|
||||
return {
|
||||
noteId: noteInfo.noteId,
|
||||
expression: fields.expression || fields.word || fallbackExpression,
|
||||
expression:
|
||||
getPreferredWordValueFromExtractedFields(fields, this.deps.getConfig()) || fallbackExpression,
|
||||
sentencePreview: this.deps.truncateSentence(
|
||||
fields[(sentenceCardConfig.sentenceField || 'sentence').toLowerCase()] ||
|
||||
(isOriginal ? '' : this.deps.getCurrentSubtitleText() || ''),
|
||||
@@ -191,7 +196,7 @@ export class FieldGroupingWorkflow {
|
||||
|
||||
private getExpression(noteInfo: FieldGroupingWorkflowNoteInfo): string {
|
||||
const fields = this.deps.extractFields(noteInfo.fields);
|
||||
return fields.expression || fields.word || '';
|
||||
return getPreferredWordValueFromExtractedFields(fields, this.deps.getConfig());
|
||||
}
|
||||
|
||||
private async resolveFieldGroupingCallback(): Promise<
|
||||
|
||||
@@ -1,5 +1,6 @@
|
||||
import { KikuMergePreviewResponse } from '../types';
|
||||
import { createLogger } from '../logger';
|
||||
import { getPreferredWordValueFromExtractedFields } from '../anki-field-config';
|
||||
|
||||
const log = createLogger('anki').child('integration.field-grouping');
|
||||
|
||||
@@ -9,6 +10,11 @@ interface FieldGroupingNoteInfo {
|
||||
}
|
||||
|
||||
interface FieldGroupingDeps {
|
||||
getConfig: () => {
|
||||
fields?: {
|
||||
word?: string;
|
||||
};
|
||||
};
|
||||
getEffectiveSentenceCardConfig: () => {
|
||||
model?: string;
|
||||
sentenceField: string;
|
||||
@@ -102,7 +108,10 @@ export class FieldGroupingService {
|
||||
}
|
||||
const noteInfoBeforeUpdate = notesInfo[0]!;
|
||||
const fields = this.deps.extractFields(noteInfoBeforeUpdate.fields);
|
||||
const expressionText = fields.expression || fields.word || '';
|
||||
const expressionText = getPreferredWordValueFromExtractedFields(
|
||||
fields,
|
||||
this.deps.getConfig(),
|
||||
);
|
||||
if (!expressionText) {
|
||||
this.deps.showOsdNotification('No expression/word field found');
|
||||
return;
|
||||
|
||||
535
src/anki-integration/known-word-cache.test.ts
Normal file
535
src/anki-integration/known-word-cache.test.ts
Normal file
@@ -0,0 +1,535 @@
|
||||
import test from 'node:test';
|
||||
import assert from 'node:assert/strict';
|
||||
import fs from 'node:fs';
|
||||
import os from 'node:os';
|
||||
import path from 'node:path';
|
||||
|
||||
import type { AnkiConnectConfig } from '../types';
|
||||
import { KnownWordCacheManager } from './known-word-cache';
|
||||
|
||||
async function waitForCondition(
|
||||
condition: () => boolean,
|
||||
timeoutMs = 500,
|
||||
intervalMs = 10,
|
||||
): Promise<void> {
|
||||
const startedAt = Date.now();
|
||||
while (Date.now() - startedAt < timeoutMs) {
|
||||
if (condition()) {
|
||||
return;
|
||||
}
|
||||
await new Promise((resolve) => setTimeout(resolve, intervalMs));
|
||||
}
|
||||
throw new Error('Timed out waiting for condition');
|
||||
}
|
||||
|
||||
function createKnownWordCacheHarness(config: AnkiConnectConfig): {
|
||||
manager: KnownWordCacheManager;
|
||||
calls: {
|
||||
findNotes: number;
|
||||
notesInfo: number;
|
||||
};
|
||||
statePath: string;
|
||||
clientState: {
|
||||
findNotesResult: number[];
|
||||
notesInfoResult: Array<{ noteId: number; fields: Record<string, { value: string }> }>;
|
||||
findNotesByQuery: Map<string, number[]>;
|
||||
};
|
||||
cleanup: () => void;
|
||||
} {
|
||||
const stateDir = fs.mkdtempSync(path.join(os.tmpdir(), 'subminer-known-word-cache-'));
|
||||
const statePath = path.join(stateDir, 'known-words-cache.json');
|
||||
const calls = {
|
||||
findNotes: 0,
|
||||
notesInfo: 0,
|
||||
};
|
||||
const clientState = {
|
||||
findNotesResult: [] as number[],
|
||||
notesInfoResult: [] as Array<{ noteId: number; fields: Record<string, { value: string }> }>,
|
||||
findNotesByQuery: new Map<string, number[]>(),
|
||||
};
|
||||
const manager = new KnownWordCacheManager({
|
||||
client: {
|
||||
findNotes: async (query) => {
|
||||
calls.findNotes += 1;
|
||||
if (clientState.findNotesByQuery.has(query)) {
|
||||
return clientState.findNotesByQuery.get(query) ?? [];
|
||||
}
|
||||
return clientState.findNotesResult;
|
||||
},
|
||||
notesInfo: async (noteIds) => {
|
||||
calls.notesInfo += 1;
|
||||
return clientState.notesInfoResult.filter((note) => noteIds.includes(note.noteId));
|
||||
},
|
||||
},
|
||||
getConfig: () => config,
|
||||
knownWordCacheStatePath: statePath,
|
||||
showStatusNotification: () => undefined,
|
||||
});
|
||||
|
||||
return {
|
||||
manager,
|
||||
calls,
|
||||
statePath,
|
||||
clientState,
|
||||
cleanup: () => {
|
||||
fs.rmSync(stateDir, { recursive: true, force: true });
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
test('KnownWordCacheManager startLifecycle keeps fresh persisted cache without immediate refresh', async () => {
|
||||
const config: AnkiConnectConfig = {
|
||||
knownWords: {
|
||||
highlightEnabled: true,
|
||||
refreshMinutes: 60,
|
||||
},
|
||||
};
|
||||
const { manager, calls, statePath, cleanup } = createKnownWordCacheHarness(config);
|
||||
|
||||
try {
|
||||
fs.writeFileSync(
|
||||
statePath,
|
||||
JSON.stringify({
|
||||
version: 2,
|
||||
refreshedAtMs: Date.now(),
|
||||
scope: '{"refreshMinutes":60,"scope":"is:note","fieldsWord":""}',
|
||||
words: ['猫'],
|
||||
notes: {
|
||||
'1': ['猫'],
|
||||
},
|
||||
}),
|
||||
'utf-8',
|
||||
);
|
||||
|
||||
manager.startLifecycle();
|
||||
await new Promise((resolve) => setTimeout(resolve, 25));
|
||||
|
||||
assert.equal(manager.isKnownWord('猫'), true);
|
||||
assert.equal(calls.findNotes, 0);
|
||||
assert.equal(calls.notesInfo, 0);
|
||||
} finally {
|
||||
manager.stopLifecycle();
|
||||
cleanup();
|
||||
}
|
||||
});
|
||||
|
||||
test('KnownWordCacheManager startLifecycle immediately refreshes stale persisted cache', async () => {
|
||||
const config: AnkiConnectConfig = {
|
||||
fields: {
|
||||
word: 'Word',
|
||||
},
|
||||
knownWords: {
|
||||
highlightEnabled: true,
|
||||
refreshMinutes: 1,
|
||||
},
|
||||
};
|
||||
const { manager, calls, statePath, clientState, cleanup } = createKnownWordCacheHarness(config);
|
||||
|
||||
try {
|
||||
fs.writeFileSync(
|
||||
statePath,
|
||||
JSON.stringify({
|
||||
version: 2,
|
||||
refreshedAtMs: Date.now() - 61_000,
|
||||
scope: '{"refreshMinutes":1,"scope":"is:note","fieldsWord":"Word"}',
|
||||
words: ['猫'],
|
||||
notes: {
|
||||
'1': ['猫'],
|
||||
},
|
||||
}),
|
||||
'utf-8',
|
||||
);
|
||||
|
||||
clientState.findNotesResult = [1];
|
||||
clientState.notesInfoResult = [
|
||||
{
|
||||
noteId: 1,
|
||||
fields: {
|
||||
Word: { value: '犬' },
|
||||
},
|
||||
},
|
||||
];
|
||||
|
||||
manager.startLifecycle();
|
||||
await waitForCondition(() => calls.findNotes === 1 && calls.notesInfo === 1);
|
||||
|
||||
assert.equal(manager.isKnownWord('猫'), false);
|
||||
assert.equal(manager.isKnownWord('犬'), true);
|
||||
} finally {
|
||||
manager.stopLifecycle();
|
||||
cleanup();
|
||||
}
|
||||
});
|
||||
|
||||
test('KnownWordCacheManager invalidates persisted cache when fields.word changes', () => {
|
||||
const config: AnkiConnectConfig = {
|
||||
deck: 'Mining',
|
||||
fields: {
|
||||
word: 'Word',
|
||||
},
|
||||
knownWords: {
|
||||
highlightEnabled: true,
|
||||
},
|
||||
};
|
||||
const { manager, cleanup } = createKnownWordCacheHarness(config);
|
||||
|
||||
try {
|
||||
manager.appendFromNoteInfo({
|
||||
noteId: 1,
|
||||
fields: {
|
||||
Word: { value: '猫' },
|
||||
},
|
||||
});
|
||||
assert.equal(manager.isKnownWord('猫'), true);
|
||||
|
||||
config.fields = {
|
||||
...config.fields,
|
||||
word: 'Expression',
|
||||
};
|
||||
|
||||
(
|
||||
manager as unknown as {
|
||||
loadKnownWordCacheState: () => void;
|
||||
}
|
||||
).loadKnownWordCacheState();
|
||||
|
||||
assert.equal(manager.isKnownWord('猫'), false);
|
||||
} finally {
|
||||
cleanup();
|
||||
}
|
||||
});
|
||||
|
||||
test('KnownWordCacheManager refresh incrementally reconciles deleted and edited note words', async () => {
|
||||
const config: AnkiConnectConfig = {
|
||||
fields: {
|
||||
word: 'Word',
|
||||
},
|
||||
knownWords: {
|
||||
highlightEnabled: true,
|
||||
},
|
||||
};
|
||||
const { manager, statePath, clientState, cleanup } = createKnownWordCacheHarness(config);
|
||||
|
||||
try {
|
||||
fs.writeFileSync(
|
||||
statePath,
|
||||
JSON.stringify({
|
||||
version: 2,
|
||||
refreshedAtMs: 1,
|
||||
scope: '{"refreshMinutes":1440,"scope":"is:note","fieldsWord":"Word"}',
|
||||
words: ['猫', '犬'],
|
||||
notes: {
|
||||
'1': ['猫'],
|
||||
'2': ['犬'],
|
||||
},
|
||||
}),
|
||||
'utf-8',
|
||||
);
|
||||
|
||||
(
|
||||
manager as unknown as {
|
||||
loadKnownWordCacheState: () => void;
|
||||
}
|
||||
).loadKnownWordCacheState();
|
||||
|
||||
clientState.findNotesResult = [1];
|
||||
clientState.notesInfoResult = [
|
||||
{
|
||||
noteId: 1,
|
||||
fields: {
|
||||
Word: { value: '鳥' },
|
||||
},
|
||||
},
|
||||
];
|
||||
|
||||
await manager.refresh(true);
|
||||
|
||||
assert.equal(manager.isKnownWord('猫'), false);
|
||||
assert.equal(manager.isKnownWord('犬'), false);
|
||||
assert.equal(manager.isKnownWord('鳥'), true);
|
||||
|
||||
const persisted = JSON.parse(fs.readFileSync(statePath, 'utf-8')) as {
|
||||
version: number;
|
||||
words: string[];
|
||||
notes?: Record<string, string[]>;
|
||||
};
|
||||
assert.equal(persisted.version, 2);
|
||||
assert.deepEqual(persisted.words.sort(), ['鳥']);
|
||||
assert.deepEqual(persisted.notes, {
|
||||
'1': ['鳥'],
|
||||
});
|
||||
} finally {
|
||||
cleanup();
|
||||
}
|
||||
});
|
||||
|
||||
test('KnownWordCacheManager skips malformed note info without fields', async () => {
|
||||
const config: AnkiConnectConfig = {
|
||||
fields: {
|
||||
word: 'Word',
|
||||
},
|
||||
knownWords: {
|
||||
highlightEnabled: true,
|
||||
},
|
||||
};
|
||||
const { manager, clientState, cleanup } = createKnownWordCacheHarness(config);
|
||||
|
||||
try {
|
||||
clientState.findNotesResult = [1, 2];
|
||||
clientState.notesInfoResult = [
|
||||
{
|
||||
noteId: 1,
|
||||
fields: undefined as unknown as Record<string, { value: string }>,
|
||||
},
|
||||
{
|
||||
noteId: 2,
|
||||
fields: {
|
||||
Word: { value: '猫' },
|
||||
},
|
||||
},
|
||||
];
|
||||
|
||||
await manager.refresh(true);
|
||||
|
||||
assert.equal(manager.isKnownWord('猫'), true);
|
||||
assert.equal(manager.isKnownWord('犬'), false);
|
||||
} finally {
|
||||
cleanup();
|
||||
}
|
||||
});
|
||||
|
||||
test('KnownWordCacheManager preserves cache state key captured before refresh work', async () => {
|
||||
const config: AnkiConnectConfig = {
|
||||
fields: {
|
||||
word: 'Word',
|
||||
},
|
||||
knownWords: {
|
||||
highlightEnabled: true,
|
||||
refreshMinutes: 1,
|
||||
},
|
||||
};
|
||||
const stateDir = fs.mkdtempSync(path.join(os.tmpdir(), 'subminer-known-word-cache-key-'));
|
||||
const statePath = path.join(stateDir, 'known-words-cache.json');
|
||||
let notesInfoStarted = false;
|
||||
let releaseNotesInfo!: () => void;
|
||||
const notesInfoGate = new Promise<void>((resolve) => {
|
||||
releaseNotesInfo = resolve;
|
||||
});
|
||||
const manager = new KnownWordCacheManager({
|
||||
client: {
|
||||
findNotes: async () => [1],
|
||||
notesInfo: async () => {
|
||||
notesInfoStarted = true;
|
||||
await notesInfoGate;
|
||||
return [
|
||||
{
|
||||
noteId: 1,
|
||||
fields: {
|
||||
Word: { value: '猫' },
|
||||
},
|
||||
},
|
||||
];
|
||||
},
|
||||
},
|
||||
getConfig: () => config,
|
||||
knownWordCacheStatePath: statePath,
|
||||
showStatusNotification: () => undefined,
|
||||
});
|
||||
|
||||
try {
|
||||
const refreshPromise = manager.refresh(true);
|
||||
await waitForCondition(() => notesInfoStarted);
|
||||
|
||||
config.fields = {
|
||||
...config.fields,
|
||||
word: 'Expression',
|
||||
};
|
||||
releaseNotesInfo();
|
||||
await refreshPromise;
|
||||
|
||||
const persisted = JSON.parse(fs.readFileSync(statePath, 'utf-8')) as {
|
||||
scope: string;
|
||||
words: string[];
|
||||
};
|
||||
assert.equal(
|
||||
persisted.scope,
|
||||
'{"refreshMinutes":1,"scope":"is:note","fieldsWord":"Word"}',
|
||||
);
|
||||
assert.deepEqual(persisted.words, ['猫']);
|
||||
} finally {
|
||||
fs.rmSync(stateDir, { recursive: true, force: true });
|
||||
}
|
||||
});
|
||||
|
||||
test('KnownWordCacheManager does not borrow fields from other decks during refresh', async () => {
|
||||
const config: AnkiConnectConfig = {
|
||||
knownWords: {
|
||||
highlightEnabled: true,
|
||||
decks: {
|
||||
Mining: [],
|
||||
Reading: ['AltWord'],
|
||||
},
|
||||
},
|
||||
};
|
||||
const { manager, clientState, cleanup } = createKnownWordCacheHarness(config);
|
||||
|
||||
try {
|
||||
clientState.findNotesByQuery.set('deck:"Mining"', [1]);
|
||||
clientState.findNotesByQuery.set('deck:"Reading"', []);
|
||||
clientState.notesInfoResult = [
|
||||
{
|
||||
noteId: 1,
|
||||
fields: {
|
||||
AltWord: { value: '猫' },
|
||||
},
|
||||
},
|
||||
];
|
||||
|
||||
await manager.refresh(true);
|
||||
|
||||
assert.equal(manager.isKnownWord('猫'), false);
|
||||
} finally {
|
||||
cleanup();
|
||||
}
|
||||
});
|
||||
|
||||
test('KnownWordCacheManager invalidates persisted cache when per-deck fields change', () => {
|
||||
const config: AnkiConnectConfig = {
|
||||
fields: {
|
||||
word: 'Word',
|
||||
},
|
||||
knownWords: {
|
||||
highlightEnabled: true,
|
||||
decks: {
|
||||
Mining: ['Word'],
|
||||
},
|
||||
},
|
||||
};
|
||||
const { manager, cleanup } = createKnownWordCacheHarness(config);
|
||||
|
||||
try {
|
||||
manager.appendFromNoteInfo({
|
||||
noteId: 1,
|
||||
fields: {
|
||||
Word: { value: '猫' },
|
||||
},
|
||||
});
|
||||
assert.equal(manager.isKnownWord('猫'), true);
|
||||
|
||||
config.knownWords = {
|
||||
...config.knownWords,
|
||||
decks: {
|
||||
Mining: ['Expression'],
|
||||
},
|
||||
};
|
||||
|
||||
(
|
||||
manager as unknown as {
|
||||
loadKnownWordCacheState: () => void;
|
||||
}
|
||||
).loadKnownWordCacheState();
|
||||
|
||||
assert.equal(manager.isKnownWord('猫'), false);
|
||||
} finally {
|
||||
cleanup();
|
||||
}
|
||||
});
|
||||
|
||||
test('KnownWordCacheManager preserves deck-specific field mappings during refresh', async () => {
|
||||
const config: AnkiConnectConfig = {
|
||||
knownWords: {
|
||||
highlightEnabled: true,
|
||||
decks: {
|
||||
Mining: ['Expression'],
|
||||
Reading: ['Word'],
|
||||
},
|
||||
},
|
||||
};
|
||||
const { manager, clientState, cleanup } = createKnownWordCacheHarness(config);
|
||||
|
||||
try {
|
||||
clientState.findNotesByQuery.set('deck:"Mining"', [1]);
|
||||
clientState.findNotesByQuery.set('deck:"Reading"', [2]);
|
||||
clientState.notesInfoResult = [
|
||||
{
|
||||
noteId: 1,
|
||||
fields: {
|
||||
Expression: { value: '猫' },
|
||||
Word: { value: 'should-not-count' },
|
||||
},
|
||||
},
|
||||
{
|
||||
noteId: 2,
|
||||
fields: {
|
||||
Word: { value: '犬' },
|
||||
Expression: { value: 'also-ignored' },
|
||||
},
|
||||
},
|
||||
];
|
||||
|
||||
await manager.refresh(true);
|
||||
|
||||
assert.equal(manager.isKnownWord('猫'), true);
|
||||
assert.equal(manager.isKnownWord('犬'), true);
|
||||
assert.equal(manager.isKnownWord('should-not-count'), false);
|
||||
assert.equal(manager.isKnownWord('also-ignored'), false);
|
||||
} finally {
|
||||
cleanup();
|
||||
}
|
||||
});
|
||||
|
||||
test('KnownWordCacheManager uses the current deck fields for immediate append', () => {
|
||||
const config: AnkiConnectConfig = {
|
||||
deck: 'Mining',
|
||||
fields: {
|
||||
word: 'Word',
|
||||
},
|
||||
knownWords: {
|
||||
highlightEnabled: true,
|
||||
decks: {
|
||||
Mining: ['Expression'],
|
||||
Reading: ['Word'],
|
||||
},
|
||||
},
|
||||
};
|
||||
const { manager, cleanup } = createKnownWordCacheHarness(config);
|
||||
|
||||
try {
|
||||
manager.appendFromNoteInfo({
|
||||
noteId: 1,
|
||||
fields: {
|
||||
Expression: { value: '猫' },
|
||||
Word: { value: 'should-not-count' },
|
||||
},
|
||||
});
|
||||
|
||||
assert.equal(manager.isKnownWord('猫'), true);
|
||||
assert.equal(manager.isKnownWord('should-not-count'), false);
|
||||
} finally {
|
||||
cleanup();
|
||||
}
|
||||
});
|
||||
|
||||
test('KnownWordCacheManager skips immediate append when addMinedWordsImmediately is disabled', () => {
|
||||
const config: AnkiConnectConfig = {
|
||||
knownWords: {
|
||||
highlightEnabled: true,
|
||||
addMinedWordsImmediately: false,
|
||||
},
|
||||
};
|
||||
const { manager, statePath, cleanup } = createKnownWordCacheHarness(config);
|
||||
|
||||
try {
|
||||
manager.appendFromNoteInfo({
|
||||
noteId: 1,
|
||||
fields: {
|
||||
Expression: { value: '猫' },
|
||||
},
|
||||
});
|
||||
|
||||
assert.equal(manager.isKnownWord('猫'), false);
|
||||
assert.equal(fs.existsSync(statePath), false);
|
||||
} finally {
|
||||
cleanup();
|
||||
}
|
||||
});
|
||||
@@ -2,23 +2,85 @@ import fs from 'fs';
|
||||
import path from 'path';
|
||||
|
||||
import { DEFAULT_ANKI_CONNECT_CONFIG } from '../config';
|
||||
import { getConfiguredWordFieldName } from '../anki-field-config';
|
||||
import { AnkiConnectConfig } from '../types';
|
||||
import { createLogger } from '../logger';
|
||||
|
||||
const log = createLogger('anki').child('integration.known-word-cache');
|
||||
|
||||
function trimToNonEmptyString(value: unknown): string | null {
|
||||
if (typeof value !== 'string') return null;
|
||||
const trimmed = value.trim();
|
||||
return trimmed.length > 0 ? trimmed : null;
|
||||
}
|
||||
|
||||
export function getKnownWordCacheRefreshIntervalMinutes(config: AnkiConnectConfig): number {
|
||||
const refreshMinutes = config.knownWords?.refreshMinutes;
|
||||
return typeof refreshMinutes === 'number' && Number.isFinite(refreshMinutes) && refreshMinutes > 0
|
||||
? refreshMinutes
|
||||
: DEFAULT_ANKI_CONNECT_CONFIG.knownWords.refreshMinutes;
|
||||
}
|
||||
|
||||
export function getKnownWordCacheScopeForConfig(config: AnkiConnectConfig): string {
|
||||
const configuredDecks = config.knownWords?.decks;
|
||||
if (configuredDecks && typeof configuredDecks === 'object' && !Array.isArray(configuredDecks)) {
|
||||
const normalizedDecks = Object.entries(configuredDecks)
|
||||
.map(([deckName, fields]) => {
|
||||
const name = trimToNonEmptyString(deckName);
|
||||
if (!name) return null;
|
||||
const normalizedFields = Array.isArray(fields)
|
||||
? [
|
||||
...new Set(
|
||||
fields
|
||||
.map(String)
|
||||
.map(trimToNonEmptyString)
|
||||
.filter((field): field is string => Boolean(field)),
|
||||
),
|
||||
].sort()
|
||||
: [];
|
||||
return [name, normalizedFields];
|
||||
})
|
||||
.filter((entry): entry is [string, string[]] => entry !== null)
|
||||
.sort(([a], [b]) => a.localeCompare(b));
|
||||
if (normalizedDecks.length > 0) {
|
||||
return `decks:${JSON.stringify(normalizedDecks)}`;
|
||||
}
|
||||
}
|
||||
|
||||
const configuredDeck = trimToNonEmptyString(config.deck);
|
||||
return configuredDeck ? `deck:${configuredDeck}` : 'is:note';
|
||||
}
|
||||
|
||||
export function getKnownWordCacheLifecycleConfig(config: AnkiConnectConfig): string {
|
||||
return JSON.stringify({
|
||||
refreshMinutes: getKnownWordCacheRefreshIntervalMinutes(config),
|
||||
scope: getKnownWordCacheScopeForConfig(config),
|
||||
fieldsWord: trimToNonEmptyString(config.fields?.word) ?? '',
|
||||
});
|
||||
}
|
||||
|
||||
export interface KnownWordCacheNoteInfo {
|
||||
noteId: number;
|
||||
fields: Record<string, { value: string }>;
|
||||
}
|
||||
|
||||
interface KnownWordCacheState {
|
||||
interface KnownWordCacheStateV1 {
|
||||
readonly version: 1;
|
||||
readonly refreshedAtMs: number;
|
||||
readonly scope: string;
|
||||
readonly words: string[];
|
||||
}
|
||||
|
||||
interface KnownWordCacheStateV2 {
|
||||
readonly version: 2;
|
||||
readonly refreshedAtMs: number;
|
||||
readonly scope: string;
|
||||
readonly words: string[];
|
||||
readonly notes: Record<string, string[]>;
|
||||
}
|
||||
|
||||
type KnownWordCacheState = KnownWordCacheStateV1 | KnownWordCacheStateV2;
|
||||
|
||||
interface KnownWordCacheClient {
|
||||
findNotes: (
|
||||
query: string,
|
||||
@@ -36,11 +98,19 @@ interface KnownWordCacheDeps {
|
||||
showStatusNotification: (message: string) => void;
|
||||
}
|
||||
|
||||
type KnownWordQueryScope = {
|
||||
query: string;
|
||||
fields: string[];
|
||||
};
|
||||
|
||||
export class KnownWordCacheManager {
|
||||
private knownWordsLastRefreshedAtMs = 0;
|
||||
private knownWordsScope = '';
|
||||
private knownWordsStateKey = '';
|
||||
private knownWords: Set<string> = new Set();
|
||||
private wordReferenceCounts = new Map<string, number>();
|
||||
private noteWordsById = new Map<number, string[]>();
|
||||
private knownWordsRefreshTimer: ReturnType<typeof setInterval> | null = null;
|
||||
private knownWordsRefreshTimeout: ReturnType<typeof setTimeout> | null = null;
|
||||
private isRefreshingKnownWords = false;
|
||||
private readonly statePath: string;
|
||||
|
||||
@@ -72,7 +142,7 @@ export class KnownWordCacheManager {
|
||||
}
|
||||
|
||||
const refreshMinutes = this.getKnownWordRefreshIntervalMs() / 60_000;
|
||||
const scope = this.getKnownWordCacheScope();
|
||||
const scope = getKnownWordCacheScopeForConfig(this.deps.getConfig());
|
||||
log.info(
|
||||
'Known-word cache lifecycle enabled',
|
||||
`scope=${scope}`,
|
||||
@@ -81,14 +151,14 @@ export class KnownWordCacheManager {
|
||||
);
|
||||
|
||||
this.loadKnownWordCacheState();
|
||||
void this.refreshKnownWords();
|
||||
const refreshIntervalMs = this.getKnownWordRefreshIntervalMs();
|
||||
this.knownWordsRefreshTimer = setInterval(() => {
|
||||
void this.refreshKnownWords();
|
||||
}, refreshIntervalMs);
|
||||
this.scheduleKnownWordRefreshLifecycle();
|
||||
}
|
||||
|
||||
stopLifecycle(): void {
|
||||
if (this.knownWordsRefreshTimeout) {
|
||||
clearTimeout(this.knownWordsRefreshTimeout);
|
||||
this.knownWordsRefreshTimeout = null;
|
||||
}
|
||||
if (this.knownWordsRefreshTimer) {
|
||||
clearInterval(this.knownWordsRefreshTimer);
|
||||
this.knownWordsRefreshTimer = null;
|
||||
@@ -96,45 +166,44 @@ export class KnownWordCacheManager {
|
||||
}
|
||||
|
||||
appendFromNoteInfo(noteInfo: KnownWordCacheNoteInfo): void {
|
||||
if (!this.isKnownWordCacheEnabled()) {
|
||||
if (!this.isKnownWordCacheEnabled() || !this.shouldAddMinedWordsImmediately()) {
|
||||
return;
|
||||
}
|
||||
|
||||
const currentScope = this.getKnownWordCacheScope();
|
||||
if (this.knownWordsScope && this.knownWordsScope !== currentScope) {
|
||||
const currentStateKey = this.getKnownWordCacheStateKey();
|
||||
if (this.knownWordsStateKey && this.knownWordsStateKey !== currentStateKey) {
|
||||
this.clearKnownWordCacheState();
|
||||
}
|
||||
if (!this.knownWordsScope) {
|
||||
this.knownWordsScope = currentScope;
|
||||
if (!this.knownWordsStateKey) {
|
||||
this.knownWordsStateKey = currentStateKey;
|
||||
}
|
||||
|
||||
let addedCount = 0;
|
||||
for (const rawWord of this.extractKnownWordsFromNoteInfo(noteInfo)) {
|
||||
const normalized = this.normalizeKnownWordForLookup(rawWord);
|
||||
if (!normalized || this.knownWords.has(normalized)) {
|
||||
continue;
|
||||
}
|
||||
this.knownWords.add(normalized);
|
||||
addedCount += 1;
|
||||
const preferredFields = this.getImmediateAppendFields();
|
||||
if (!preferredFields) {
|
||||
return;
|
||||
}
|
||||
|
||||
if (addedCount > 0) {
|
||||
if (this.knownWordsLastRefreshedAtMs <= 0) {
|
||||
this.knownWordsLastRefreshedAtMs = Date.now();
|
||||
}
|
||||
this.persistKnownWordCacheState();
|
||||
log.info(
|
||||
'Known-word cache updated in-session',
|
||||
`added=${addedCount}`,
|
||||
`scope=${currentScope}`,
|
||||
);
|
||||
const nextWords = this.extractNormalizedKnownWordsFromNoteInfo(noteInfo, preferredFields);
|
||||
const changed = this.replaceNoteSnapshot(noteInfo.noteId, nextWords);
|
||||
if (!changed) {
|
||||
return;
|
||||
}
|
||||
|
||||
if (this.knownWordsLastRefreshedAtMs <= 0) {
|
||||
this.knownWordsLastRefreshedAtMs = Date.now();
|
||||
}
|
||||
this.persistKnownWordCacheState();
|
||||
log.info(
|
||||
'Known-word cache updated in-session',
|
||||
`noteId=${noteInfo.noteId}`,
|
||||
`wordCount=${nextWords.length}`,
|
||||
`scope=${getKnownWordCacheScopeForConfig(this.deps.getConfig())}`,
|
||||
);
|
||||
}
|
||||
|
||||
clearKnownWordCacheState(): void {
|
||||
this.knownWords = new Set();
|
||||
this.knownWordsLastRefreshedAtMs = 0;
|
||||
this.knownWordsScope = this.getKnownWordCacheScope();
|
||||
this.clearInMemoryState();
|
||||
this.knownWordsStateKey = this.getKnownWordCacheStateKey();
|
||||
try {
|
||||
if (fs.existsSync(this.statePath)) {
|
||||
fs.unlinkSync(this.statePath);
|
||||
@@ -158,41 +227,43 @@ export class KnownWordCacheManager {
|
||||
return;
|
||||
}
|
||||
|
||||
const frozenStateKey = this.getKnownWordCacheStateKey();
|
||||
this.isRefreshingKnownWords = true;
|
||||
try {
|
||||
const query = this.buildKnownWordsQuery();
|
||||
log.debug('Refreshing known-word cache', `query=${query}`);
|
||||
const noteIds = (await this.deps.client.findNotes(query, {
|
||||
maxRetries: 0,
|
||||
})) as number[];
|
||||
const noteFieldsById = await this.fetchKnownWordNoteFieldsById();
|
||||
const currentNoteIds = Array.from(noteFieldsById.keys()).sort((a, b) => a - b);
|
||||
|
||||
const nextKnownWords = new Set<string>();
|
||||
if (noteIds.length > 0) {
|
||||
const chunkSize = 50;
|
||||
for (let i = 0; i < noteIds.length; i += chunkSize) {
|
||||
const chunk = noteIds.slice(i, i + chunkSize);
|
||||
const notesInfoResult = (await this.deps.client.notesInfo(chunk)) as unknown[];
|
||||
const notesInfo = notesInfoResult as KnownWordCacheNoteInfo[];
|
||||
if (this.noteWordsById.size === 0) {
|
||||
await this.rebuildFromCurrentNotes(currentNoteIds, noteFieldsById);
|
||||
} else {
|
||||
const currentNoteIdSet = new Set(currentNoteIds);
|
||||
for (const noteId of Array.from(this.noteWordsById.keys())) {
|
||||
if (!currentNoteIdSet.has(noteId)) {
|
||||
this.removeNoteSnapshot(noteId);
|
||||
}
|
||||
}
|
||||
|
||||
for (const noteInfo of notesInfo) {
|
||||
for (const word of this.extractKnownWordsFromNoteInfo(noteInfo)) {
|
||||
const normalized = this.normalizeKnownWordForLookup(word);
|
||||
if (normalized) {
|
||||
nextKnownWords.add(normalized);
|
||||
}
|
||||
}
|
||||
if (currentNoteIds.length > 0) {
|
||||
const noteInfos = await this.fetchKnownWordNotesInfo(currentNoteIds);
|
||||
for (const noteInfo of noteInfos) {
|
||||
this.replaceNoteSnapshot(
|
||||
noteInfo.noteId,
|
||||
this.extractNormalizedKnownWordsFromNoteInfo(
|
||||
noteInfo,
|
||||
noteFieldsById.get(noteInfo.noteId),
|
||||
),
|
||||
);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
this.knownWords = nextKnownWords;
|
||||
this.knownWordsLastRefreshedAtMs = Date.now();
|
||||
this.knownWordsScope = this.getKnownWordCacheScope();
|
||||
this.knownWordsStateKey = frozenStateKey;
|
||||
this.persistKnownWordCacheState();
|
||||
log.info(
|
||||
'Known-word cache refreshed',
|
||||
`noteCount=${noteIds.length}`,
|
||||
`wordCount=${nextKnownWords.size}`,
|
||||
`noteCount=${currentNoteIds.length}`,
|
||||
`wordCount=${this.knownWords.size}`,
|
||||
);
|
||||
} catch (error) {
|
||||
log.warn('Failed to refresh known-word cache:', (error as Error).message);
|
||||
@@ -203,32 +274,100 @@ export class KnownWordCacheManager {
|
||||
}
|
||||
|
||||
private isKnownWordCacheEnabled(): boolean {
|
||||
return this.deps.getConfig().nPlusOne?.highlightEnabled === true;
|
||||
return this.deps.getConfig().knownWords?.highlightEnabled === true;
|
||||
}
|
||||
|
||||
private shouldAddMinedWordsImmediately(): boolean {
|
||||
return this.deps.getConfig().knownWords?.addMinedWordsImmediately !== false;
|
||||
}
|
||||
|
||||
private getKnownWordRefreshIntervalMs(): number {
|
||||
const minutes = this.deps.getConfig().nPlusOne?.refreshMinutes;
|
||||
const safeMinutes =
|
||||
typeof minutes === 'number' && Number.isFinite(minutes) && minutes > 0
|
||||
? minutes
|
||||
: DEFAULT_ANKI_CONNECT_CONFIG.nPlusOne.refreshMinutes;
|
||||
return safeMinutes * 60_000;
|
||||
return getKnownWordCacheRefreshIntervalMinutes(this.deps.getConfig()) * 60_000;
|
||||
}
|
||||
|
||||
private getDefaultKnownWordFields(): string[] {
|
||||
const configuredWordField = getConfiguredWordFieldName(this.deps.getConfig());
|
||||
return [...new Set([configuredWordField, 'Word', 'Reading', 'Word Reading'])];
|
||||
}
|
||||
|
||||
private getKnownWordDecks(): string[] {
|
||||
const configuredDecks = this.deps.getConfig().nPlusOne?.decks;
|
||||
if (Array.isArray(configuredDecks)) {
|
||||
const decks = configuredDecks
|
||||
.filter((entry): entry is string => typeof entry === 'string')
|
||||
.map((entry) => entry.trim())
|
||||
.filter((entry) => entry.length > 0);
|
||||
return [...new Set(decks)];
|
||||
const configuredDecks = this.deps.getConfig().knownWords?.decks;
|
||||
if (configuredDecks && typeof configuredDecks === 'object' && !Array.isArray(configuredDecks)) {
|
||||
return Object.keys(configuredDecks)
|
||||
.map((d) => d.trim())
|
||||
.filter((d) => d.length > 0);
|
||||
}
|
||||
|
||||
const deck = this.deps.getConfig().deck?.trim();
|
||||
return deck ? [deck] : [];
|
||||
}
|
||||
|
||||
private getConfiguredFields(): string[] {
|
||||
return this.getDefaultKnownWordFields();
|
||||
}
|
||||
|
||||
private getImmediateAppendFields(): string[] | null {
|
||||
const configuredDecks = this.deps.getConfig().knownWords?.decks;
|
||||
if (configuredDecks && typeof configuredDecks === 'object' && !Array.isArray(configuredDecks)) {
|
||||
const trimmedDeckEntries = Object.entries(configuredDecks)
|
||||
.map(([deckName, fields]) => [deckName.trim(), fields] as const)
|
||||
.filter(([deckName]) => deckName.length > 0);
|
||||
|
||||
const currentDeck = this.deps.getConfig().deck?.trim();
|
||||
const selectedDeckEntry =
|
||||
currentDeck !== undefined && currentDeck.length > 0
|
||||
? trimmedDeckEntries.find(([deckName]) => deckName === currentDeck) ?? null
|
||||
: trimmedDeckEntries.length === 1
|
||||
? trimmedDeckEntries[0] ?? null
|
||||
: null;
|
||||
|
||||
if (!selectedDeckEntry) {
|
||||
return null;
|
||||
}
|
||||
|
||||
const deckFields = selectedDeckEntry[1];
|
||||
if (Array.isArray(deckFields)) {
|
||||
const normalizedFields = [
|
||||
...new Set(
|
||||
deckFields.map(String).map((field) => field.trim()).filter((field) => field.length > 0),
|
||||
),
|
||||
];
|
||||
if (normalizedFields.length > 0) {
|
||||
return normalizedFields;
|
||||
}
|
||||
}
|
||||
|
||||
return this.getDefaultKnownWordFields();
|
||||
}
|
||||
|
||||
return this.getConfiguredFields();
|
||||
}
|
||||
|
||||
private getKnownWordQueryScopes(): KnownWordQueryScope[] {
|
||||
const configuredDecks = this.deps.getConfig().knownWords?.decks;
|
||||
if (configuredDecks && typeof configuredDecks === 'object' && !Array.isArray(configuredDecks)) {
|
||||
const scopes: KnownWordQueryScope[] = [];
|
||||
for (const [deckName, fields] of Object.entries(configuredDecks)) {
|
||||
const trimmedDeckName = deckName.trim();
|
||||
if (!trimmedDeckName) {
|
||||
continue;
|
||||
}
|
||||
const normalizedFields = Array.isArray(fields)
|
||||
? [...new Set(fields.map(String).map((field) => field.trim()).filter(Boolean))]
|
||||
: [];
|
||||
scopes.push({
|
||||
query: `deck:"${escapeAnkiSearchValue(trimmedDeckName)}"`,
|
||||
fields: normalizedFields.length > 0 ? normalizedFields : this.getDefaultKnownWordFields(),
|
||||
});
|
||||
}
|
||||
if (scopes.length > 0) {
|
||||
return scopes;
|
||||
}
|
||||
}
|
||||
|
||||
return [{ query: this.buildKnownWordsQuery(), fields: this.getDefaultKnownWordFields() }];
|
||||
}
|
||||
|
||||
private buildKnownWordsQuery(): string {
|
||||
const decks = this.getKnownWordDecks();
|
||||
if (decks.length === 0) {
|
||||
@@ -243,19 +382,15 @@ export class KnownWordCacheManager {
|
||||
return `(${deckQueries.join(' OR ')})`;
|
||||
}
|
||||
|
||||
private getKnownWordCacheScope(): string {
|
||||
const decks = this.getKnownWordDecks();
|
||||
if (decks.length === 0) {
|
||||
return 'is:note';
|
||||
}
|
||||
return `decks:${JSON.stringify(decks)}`;
|
||||
private getKnownWordCacheStateKey(): string {
|
||||
return getKnownWordCacheLifecycleConfig(this.deps.getConfig());
|
||||
}
|
||||
|
||||
private isKnownWordCacheStale(): boolean {
|
||||
if (!this.isKnownWordCacheEnabled()) {
|
||||
return true;
|
||||
}
|
||||
if (this.knownWordsScope !== this.getKnownWordCacheScope()) {
|
||||
if (this.knownWordsStateKey !== this.getKnownWordCacheStateKey()) {
|
||||
return true;
|
||||
}
|
||||
if (this.knownWordsLastRefreshedAtMs <= 0) {
|
||||
@@ -264,64 +399,231 @@ export class KnownWordCacheManager {
|
||||
return Date.now() - this.knownWordsLastRefreshedAtMs >= this.getKnownWordRefreshIntervalMs();
|
||||
}
|
||||
|
||||
private async fetchKnownWordNoteFieldsById(): Promise<Map<number, string[]>> {
|
||||
const scopes = this.getKnownWordQueryScopes();
|
||||
const noteFieldsById = new Map<number, string[]>();
|
||||
log.debug('Refreshing known-word cache', `queries=${scopes.map((scope) => scope.query).join(' | ')}`);
|
||||
|
||||
for (const scope of scopes) {
|
||||
const noteIds = (await this.deps.client.findNotes(scope.query, {
|
||||
maxRetries: 0,
|
||||
})) as number[];
|
||||
|
||||
for (const noteId of noteIds) {
|
||||
if (!Number.isInteger(noteId) || noteId <= 0) {
|
||||
continue;
|
||||
}
|
||||
const existingFields = noteFieldsById.get(noteId) ?? [];
|
||||
noteFieldsById.set(
|
||||
noteId,
|
||||
[...new Set([...existingFields, ...scope.fields])],
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
return noteFieldsById;
|
||||
}
|
||||
|
||||
private scheduleKnownWordRefreshLifecycle(): void {
|
||||
const refreshIntervalMs = this.getKnownWordRefreshIntervalMs();
|
||||
const scheduleInterval = () => {
|
||||
this.knownWordsRefreshTimer = setInterval(() => {
|
||||
void this.refreshKnownWords();
|
||||
}, refreshIntervalMs);
|
||||
};
|
||||
|
||||
const initialDelayMs = this.getMsUntilNextRefresh();
|
||||
this.knownWordsRefreshTimeout = setTimeout(() => {
|
||||
this.knownWordsRefreshTimeout = null;
|
||||
void this.refreshKnownWords();
|
||||
scheduleInterval();
|
||||
}, initialDelayMs);
|
||||
}
|
||||
|
||||
private getMsUntilNextRefresh(): number {
|
||||
if (this.knownWordsStateKey !== this.getKnownWordCacheStateKey()) {
|
||||
return 0;
|
||||
}
|
||||
if (this.knownWordsLastRefreshedAtMs <= 0) {
|
||||
return 0;
|
||||
}
|
||||
const remainingMs =
|
||||
this.getKnownWordRefreshIntervalMs() - (Date.now() - this.knownWordsLastRefreshedAtMs);
|
||||
return Math.max(0, remainingMs);
|
||||
}
|
||||
|
||||
private async rebuildFromCurrentNotes(
|
||||
noteIds: number[],
|
||||
noteFieldsById: Map<number, string[]>,
|
||||
): Promise<void> {
|
||||
this.clearInMemoryState();
|
||||
if (noteIds.length === 0) {
|
||||
return;
|
||||
}
|
||||
|
||||
const noteInfos = await this.fetchKnownWordNotesInfo(noteIds);
|
||||
for (const noteInfo of noteInfos) {
|
||||
this.replaceNoteSnapshot(
|
||||
noteInfo.noteId,
|
||||
this.extractNormalizedKnownWordsFromNoteInfo(noteInfo, noteFieldsById.get(noteInfo.noteId)),
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
private async fetchKnownWordNotesInfo(noteIds: number[]): Promise<KnownWordCacheNoteInfo[]> {
|
||||
const noteInfos: KnownWordCacheNoteInfo[] = [];
|
||||
const chunkSize = 50;
|
||||
for (let i = 0; i < noteIds.length; i += chunkSize) {
|
||||
const chunk = noteIds.slice(i, i + chunkSize);
|
||||
const notesInfoResult = (await this.deps.client.notesInfo(chunk)) as unknown[];
|
||||
const chunkInfos = notesInfoResult as KnownWordCacheNoteInfo[];
|
||||
for (const noteInfo of chunkInfos) {
|
||||
if (
|
||||
!noteInfo ||
|
||||
!Number.isInteger(noteInfo.noteId) ||
|
||||
noteInfo.noteId <= 0 ||
|
||||
typeof noteInfo.fields !== 'object' ||
|
||||
noteInfo.fields === null ||
|
||||
Array.isArray(noteInfo.fields)
|
||||
) {
|
||||
continue;
|
||||
}
|
||||
noteInfos.push(noteInfo);
|
||||
}
|
||||
}
|
||||
return noteInfos;
|
||||
}
|
||||
|
||||
private replaceNoteSnapshot(noteId: number, nextWords: string[]): boolean {
|
||||
const normalizedWords = normalizeKnownWordList(nextWords);
|
||||
const previousWords = this.noteWordsById.get(noteId) ?? [];
|
||||
if (knownWordListsEqual(previousWords, normalizedWords)) {
|
||||
return false;
|
||||
}
|
||||
|
||||
this.removeWordsFromCounts(previousWords);
|
||||
if (normalizedWords.length > 0) {
|
||||
this.noteWordsById.set(noteId, normalizedWords);
|
||||
this.addWordsToCounts(normalizedWords);
|
||||
} else {
|
||||
this.noteWordsById.delete(noteId);
|
||||
}
|
||||
return true;
|
||||
}
|
||||
|
||||
private removeNoteSnapshot(noteId: number): void {
|
||||
const previousWords = this.noteWordsById.get(noteId);
|
||||
if (!previousWords) {
|
||||
return;
|
||||
}
|
||||
this.noteWordsById.delete(noteId);
|
||||
this.removeWordsFromCounts(previousWords);
|
||||
}
|
||||
|
||||
private addWordsToCounts(words: string[]): void {
|
||||
for (const word of words) {
|
||||
const nextCount = (this.wordReferenceCounts.get(word) ?? 0) + 1;
|
||||
this.wordReferenceCounts.set(word, nextCount);
|
||||
this.knownWords.add(word);
|
||||
}
|
||||
}
|
||||
|
||||
private removeWordsFromCounts(words: string[]): void {
|
||||
for (const word of words) {
|
||||
const nextCount = (this.wordReferenceCounts.get(word) ?? 0) - 1;
|
||||
if (nextCount > 0) {
|
||||
this.wordReferenceCounts.set(word, nextCount);
|
||||
} else {
|
||||
this.wordReferenceCounts.delete(word);
|
||||
this.knownWords.delete(word);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
private clearInMemoryState(): void {
|
||||
this.knownWords = new Set();
|
||||
this.wordReferenceCounts = new Map();
|
||||
this.noteWordsById = new Map();
|
||||
this.knownWordsLastRefreshedAtMs = 0;
|
||||
}
|
||||
|
||||
private loadKnownWordCacheState(): void {
|
||||
try {
|
||||
if (!fs.existsSync(this.statePath)) {
|
||||
this.knownWords = new Set();
|
||||
this.knownWordsLastRefreshedAtMs = 0;
|
||||
this.knownWordsScope = this.getKnownWordCacheScope();
|
||||
this.clearInMemoryState();
|
||||
this.knownWordsStateKey = this.getKnownWordCacheStateKey();
|
||||
return;
|
||||
}
|
||||
|
||||
const raw = fs.readFileSync(this.statePath, 'utf-8');
|
||||
if (!raw.trim()) {
|
||||
this.knownWords = new Set();
|
||||
this.knownWordsLastRefreshedAtMs = 0;
|
||||
this.knownWordsScope = this.getKnownWordCacheScope();
|
||||
this.clearInMemoryState();
|
||||
this.knownWordsStateKey = this.getKnownWordCacheStateKey();
|
||||
return;
|
||||
}
|
||||
|
||||
const parsed = JSON.parse(raw) as unknown;
|
||||
if (!this.isKnownWordCacheStateValid(parsed)) {
|
||||
this.knownWords = new Set();
|
||||
this.knownWordsLastRefreshedAtMs = 0;
|
||||
this.knownWordsScope = this.getKnownWordCacheScope();
|
||||
this.clearInMemoryState();
|
||||
this.knownWordsStateKey = this.getKnownWordCacheStateKey();
|
||||
return;
|
||||
}
|
||||
|
||||
if (parsed.scope !== this.getKnownWordCacheScope()) {
|
||||
this.knownWords = new Set();
|
||||
this.knownWordsLastRefreshedAtMs = 0;
|
||||
this.knownWordsScope = this.getKnownWordCacheScope();
|
||||
if (parsed.scope !== this.getKnownWordCacheStateKey()) {
|
||||
this.clearInMemoryState();
|
||||
this.knownWordsStateKey = this.getKnownWordCacheStateKey();
|
||||
return;
|
||||
}
|
||||
|
||||
const nextKnownWords = new Set<string>();
|
||||
for (const value of parsed.words) {
|
||||
const normalized = this.normalizeKnownWordForLookup(value);
|
||||
if (normalized) {
|
||||
nextKnownWords.add(normalized);
|
||||
this.clearInMemoryState();
|
||||
if (parsed.version === 2) {
|
||||
for (const [noteIdKey, words] of Object.entries(parsed.notes)) {
|
||||
const noteId = Number.parseInt(noteIdKey, 10);
|
||||
if (!Number.isInteger(noteId) || noteId <= 0) {
|
||||
continue;
|
||||
}
|
||||
const normalizedWords = normalizeKnownWordList(words);
|
||||
if (normalizedWords.length === 0) {
|
||||
continue;
|
||||
}
|
||||
this.noteWordsById.set(noteId, normalizedWords);
|
||||
this.addWordsToCounts(normalizedWords);
|
||||
}
|
||||
} else {
|
||||
for (const value of parsed.words) {
|
||||
const normalized = this.normalizeKnownWordForLookup(value);
|
||||
if (!normalized) {
|
||||
continue;
|
||||
}
|
||||
this.knownWords.add(normalized);
|
||||
this.wordReferenceCounts.set(normalized, 1);
|
||||
}
|
||||
}
|
||||
|
||||
this.knownWords = nextKnownWords;
|
||||
this.knownWordsLastRefreshedAtMs = parsed.refreshedAtMs;
|
||||
this.knownWordsScope = parsed.scope;
|
||||
this.knownWordsStateKey = parsed.scope;
|
||||
} catch (error) {
|
||||
log.warn('Failed to load known-word cache state:', (error as Error).message);
|
||||
this.knownWords = new Set();
|
||||
this.knownWordsLastRefreshedAtMs = 0;
|
||||
this.knownWordsScope = this.getKnownWordCacheScope();
|
||||
this.clearInMemoryState();
|
||||
this.knownWordsStateKey = this.getKnownWordCacheStateKey();
|
||||
}
|
||||
}
|
||||
|
||||
private persistKnownWordCacheState(): void {
|
||||
try {
|
||||
const state: KnownWordCacheState = {
|
||||
version: 1,
|
||||
const notes: Record<string, string[]> = {};
|
||||
for (const [noteId, words] of this.noteWordsById.entries()) {
|
||||
if (words.length > 0) {
|
||||
notes[String(noteId)] = words;
|
||||
}
|
||||
}
|
||||
|
||||
const state: KnownWordCacheStateV2 = {
|
||||
version: 2,
|
||||
refreshedAtMs: this.knownWordsLastRefreshedAtMs,
|
||||
scope: this.knownWordsScope,
|
||||
scope: this.knownWordsStateKey,
|
||||
words: Array.from(this.knownWords),
|
||||
notes,
|
||||
};
|
||||
fs.writeFileSync(this.statePath, JSON.stringify(state), 'utf-8');
|
||||
} catch (error) {
|
||||
@@ -331,20 +633,39 @@ export class KnownWordCacheManager {
|
||||
|
||||
private isKnownWordCacheStateValid(value: unknown): value is KnownWordCacheState {
|
||||
if (typeof value !== 'object' || value === null) return false;
|
||||
const candidate = value as Partial<KnownWordCacheState>;
|
||||
if (candidate.version !== 1) return false;
|
||||
const candidate = value as Record<string, unknown>;
|
||||
if (candidate.version !== 1 && candidate.version !== 2) return false;
|
||||
if (typeof candidate.refreshedAtMs !== 'number') return false;
|
||||
if (typeof candidate.scope !== 'string') return false;
|
||||
if (!Array.isArray(candidate.words)) return false;
|
||||
if (!candidate.words.every((entry) => typeof entry === 'string')) {
|
||||
if (!candidate.words.every((entry: unknown) => typeof entry === 'string')) {
|
||||
return false;
|
||||
}
|
||||
if (candidate.version === 2) {
|
||||
if (
|
||||
typeof candidate.notes !== 'object' ||
|
||||
candidate.notes === null ||
|
||||
Array.isArray(candidate.notes)
|
||||
) {
|
||||
return false;
|
||||
}
|
||||
if (
|
||||
!Object.values(candidate.notes as Record<string, unknown>).every(
|
||||
(entry) =>
|
||||
Array.isArray(entry) && entry.every((word: unknown) => typeof word === 'string'),
|
||||
)
|
||||
) {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
return true;
|
||||
}
|
||||
|
||||
private extractKnownWordsFromNoteInfo(noteInfo: KnownWordCacheNoteInfo): string[] {
|
||||
private extractNormalizedKnownWordsFromNoteInfo(
|
||||
noteInfo: KnownWordCacheNoteInfo,
|
||||
preferredFields = this.getConfiguredFields(),
|
||||
): string[] {
|
||||
const words: string[] = [];
|
||||
const preferredFields = ['Expression', 'Word'];
|
||||
for (const preferredField of preferredFields) {
|
||||
const fieldName = resolveFieldName(Object.keys(noteInfo.fields), preferredField);
|
||||
if (!fieldName) continue;
|
||||
@@ -352,12 +673,12 @@ export class KnownWordCacheManager {
|
||||
const raw = noteInfo.fields[fieldName]?.value;
|
||||
if (!raw) continue;
|
||||
|
||||
const extracted = this.normalizeRawKnownWordValue(raw);
|
||||
if (extracted) {
|
||||
words.push(extracted);
|
||||
const normalized = this.normalizeKnownWordForLookup(raw);
|
||||
if (normalized) {
|
||||
words.push(normalized);
|
||||
}
|
||||
}
|
||||
return words;
|
||||
return normalizeKnownWordList(words);
|
||||
}
|
||||
|
||||
private normalizeRawKnownWordValue(value: string): string {
|
||||
@@ -372,6 +693,22 @@ export class KnownWordCacheManager {
|
||||
}
|
||||
}
|
||||
|
||||
function normalizeKnownWordList(words: string[]): string[] {
|
||||
return [...new Set(words.map((word) => word.trim()).filter((word) => word.length > 0))].sort();
|
||||
}
|
||||
|
||||
function knownWordListsEqual(left: string[], right: string[]): boolean {
|
||||
if (left.length !== right.length) {
|
||||
return false;
|
||||
}
|
||||
for (let index = 0; index < left.length; index += 1) {
|
||||
if (left[index] !== right[index]) {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
return true;
|
||||
}
|
||||
|
||||
function resolveFieldName(availableFieldNames: string[], preferredName: string): string | null {
|
||||
const exact = availableFieldNames.find((name) => name === preferredName);
|
||||
if (exact) return exact;
|
||||
|
||||
@@ -62,6 +62,7 @@ function createWorkflowHarness() {
|
||||
return names.find((name) => name.toLowerCase() === preferred.toLowerCase()) ?? null;
|
||||
},
|
||||
getResolvedSentenceAudioFieldName: () => null,
|
||||
getAnimatedImageLeadInSeconds: async () => 0,
|
||||
mergeFieldValue: (_existing: string, next: string, _overwrite: boolean) => next,
|
||||
generateAudioFilename: () => 'audio_1.mp3',
|
||||
generateAudio: async () => null,
|
||||
@@ -163,3 +164,42 @@ test('NoteUpdateWorkflow updates note before auto field grouping merge', async (
|
||||
assert.deepEqual(callOrder, ['update', 'auto']);
|
||||
assert.equal(harness.updates.length, 1);
|
||||
});
|
||||
|
||||
test('NoteUpdateWorkflow passes animated image lead-in when syncing avif to word audio', async () => {
|
||||
const harness = createWorkflowHarness();
|
||||
let receivedLeadInSeconds = 0;
|
||||
|
||||
harness.deps.client.notesInfo = async () =>
|
||||
[
|
||||
{
|
||||
noteId: 42,
|
||||
fields: {
|
||||
Expression: { value: 'taberu' },
|
||||
ExpressionAudio: { value: '[sound:word.mp3]' },
|
||||
Sentence: { value: '' },
|
||||
Picture: { value: '' },
|
||||
},
|
||||
},
|
||||
] satisfies NoteUpdateWorkflowNoteInfo[];
|
||||
harness.deps.getConfig = () => ({
|
||||
fields: {
|
||||
sentence: 'Sentence',
|
||||
image: 'Picture',
|
||||
},
|
||||
media: {
|
||||
generateImage: true,
|
||||
imageType: 'avif',
|
||||
syncAnimatedImageToWordAudio: true,
|
||||
},
|
||||
behavior: {},
|
||||
});
|
||||
harness.deps.getAnimatedImageLeadInSeconds = async () => 1.25;
|
||||
harness.deps.generateImage = async (leadInSeconds?: number) => {
|
||||
receivedLeadInSeconds = leadInSeconds ?? 0;
|
||||
return Buffer.from('image');
|
||||
};
|
||||
|
||||
await harness.workflow.execute(42);
|
||||
|
||||
assert.equal(receivedLeadInSeconds, 1.25);
|
||||
});
|
||||
|
||||
@@ -1,4 +1,5 @@
|
||||
import { DEFAULT_ANKI_CONNECT_CONFIG } from '../config';
|
||||
import { getPreferredWordValueFromExtractedFields } from '../anki-field-config';
|
||||
|
||||
export interface NoteUpdateWorkflowNoteInfo {
|
||||
noteId: number;
|
||||
@@ -13,6 +14,7 @@ export interface NoteUpdateWorkflowDeps {
|
||||
};
|
||||
getConfig: () => {
|
||||
fields?: {
|
||||
word?: string;
|
||||
sentence?: string;
|
||||
image?: string;
|
||||
miscInfo?: string;
|
||||
@@ -20,6 +22,8 @@ export interface NoteUpdateWorkflowDeps {
|
||||
media?: {
|
||||
generateAudio?: boolean;
|
||||
generateImage?: boolean;
|
||||
imageType?: 'static' | 'avif';
|
||||
syncAnimatedImageToWordAudio?: boolean;
|
||||
};
|
||||
behavior?: {
|
||||
overwriteAudio?: boolean;
|
||||
@@ -58,11 +62,12 @@ export interface NoteUpdateWorkflowDeps {
|
||||
...preferredNames: (string | undefined)[]
|
||||
) => string | null;
|
||||
getResolvedSentenceAudioFieldName: (noteInfo: NoteUpdateWorkflowNoteInfo) => string | null;
|
||||
getAnimatedImageLeadInSeconds: (noteInfo: NoteUpdateWorkflowNoteInfo) => Promise<number>;
|
||||
mergeFieldValue: (existing: string, newValue: string, overwrite: boolean) => string;
|
||||
generateAudioFilename: () => string;
|
||||
generateAudio: () => Promise<Buffer | null>;
|
||||
generateImageFilename: () => string;
|
||||
generateImage: () => Promise<Buffer | null>;
|
||||
generateImage: (animatedLeadInSeconds?: number) => Promise<Buffer | null>;
|
||||
formatMiscInfoPattern: (fallbackFilename: string, startTimeSeconds?: number) => string;
|
||||
addConfiguredTagsToNote: (noteId: number) => Promise<void>;
|
||||
showNotification: (noteId: number, label: string | number) => Promise<void>;
|
||||
@@ -90,8 +95,9 @@ export class NoteUpdateWorkflow {
|
||||
const noteInfo = notesInfo[0]!;
|
||||
this.deps.appendKnownWordsFromNoteInfo(noteInfo);
|
||||
const fields = this.deps.extractFields(noteInfo.fields);
|
||||
const config = this.deps.getConfig();
|
||||
|
||||
const expressionText = (fields.expression || fields.word || '').trim();
|
||||
const expressionText = getPreferredWordValueFromExtractedFields(fields, config).trim();
|
||||
const hasExpressionText = expressionText.length > 0;
|
||||
if (!hasExpressionText) {
|
||||
// Some note types omit Expression/Word; still run enrichment updates and skip duplicate checks.
|
||||
@@ -123,8 +129,6 @@ export class NoteUpdateWorkflow {
|
||||
updatePerformed = true;
|
||||
}
|
||||
|
||||
const config = this.deps.getConfig();
|
||||
|
||||
if (config.media?.generateAudio) {
|
||||
try {
|
||||
const audioFilename = this.deps.generateAudioFilename();
|
||||
@@ -152,8 +156,9 @@ export class NoteUpdateWorkflow {
|
||||
|
||||
if (config.media?.generateImage) {
|
||||
try {
|
||||
const animatedLeadInSeconds = await this.deps.getAnimatedImageLeadInSeconds(noteInfo);
|
||||
const imageFilename = this.deps.generateImageFilename();
|
||||
const imageBuffer = await this.deps.generateImage();
|
||||
const imageBuffer = await this.deps.generateImage(animatedLeadInSeconds);
|
||||
|
||||
if (imageBuffer) {
|
||||
await this.deps.client.storeMediaFile(imageFilename, imageBuffer);
|
||||
|
||||
38
src/anki-integration/polling.test.ts
Normal file
38
src/anki-integration/polling.test.ts
Normal file
@@ -0,0 +1,38 @@
|
||||
import assert from 'node:assert/strict';
|
||||
import test from 'node:test';
|
||||
|
||||
import { PollingRunner } from './polling';
|
||||
|
||||
test('polling runner records newly added cards after initialization', async () => {
|
||||
const recordedCards: number[] = [];
|
||||
let tracked = new Set<number>();
|
||||
const responses = [
|
||||
[10, 11],
|
||||
[10, 11, 12, 13],
|
||||
];
|
||||
const runner = new PollingRunner({
|
||||
getDeck: () => 'Mining',
|
||||
getPollingRate: () => 250,
|
||||
findNotes: async () => responses.shift() ?? [],
|
||||
shouldAutoUpdateNewCards: () => true,
|
||||
processNewCard: async () => undefined,
|
||||
recordCardsAdded: (count) => {
|
||||
recordedCards.push(count);
|
||||
},
|
||||
isUpdateInProgress: () => false,
|
||||
setUpdateInProgress: () => undefined,
|
||||
getTrackedNoteIds: () => tracked,
|
||||
setTrackedNoteIds: (noteIds) => {
|
||||
tracked = noteIds;
|
||||
},
|
||||
showStatusNotification: () => undefined,
|
||||
logDebug: () => undefined,
|
||||
logInfo: () => undefined,
|
||||
logWarn: () => undefined,
|
||||
});
|
||||
|
||||
await runner.pollOnce();
|
||||
await runner.pollOnce();
|
||||
|
||||
assert.deepEqual(recordedCards, [2]);
|
||||
});
|
||||
@@ -9,6 +9,7 @@ export interface PollingRunnerDeps {
|
||||
) => Promise<number[]>;
|
||||
shouldAutoUpdateNewCards: () => boolean;
|
||||
processNewCard: (noteId: number) => Promise<void>;
|
||||
recordCardsAdded?: (count: number, noteIds: number[]) => void;
|
||||
isUpdateInProgress: () => boolean;
|
||||
setUpdateInProgress: (value: boolean) => void;
|
||||
getTrackedNoteIds: () => Set<number>;
|
||||
@@ -80,6 +81,7 @@ export class PollingRunner {
|
||||
previousNoteIds.add(noteId);
|
||||
}
|
||||
this.deps.setTrackedNoteIds(previousNoteIds);
|
||||
this.deps.recordCardsAdded?.(newNoteIds.length, newNoteIds);
|
||||
|
||||
if (this.deps.shouldAutoUpdateNewCards()) {
|
||||
for (const noteId of newNoteIds) {
|
||||
|
||||
@@ -59,6 +59,10 @@ test('AnkiIntegrationRuntime normalizes url and proxy defaults', () => {
|
||||
normalized.media?.fallbackDuration,
|
||||
DEFAULT_ANKI_CONNECT_CONFIG.media.fallbackDuration,
|
||||
);
|
||||
assert.equal(
|
||||
normalized.media?.syncAnimatedImageToWordAudio,
|
||||
DEFAULT_ANKI_CONNECT_CONFIG.media.syncAnimatedImageToWordAudio,
|
||||
);
|
||||
});
|
||||
|
||||
test('AnkiIntegrationRuntime starts proxy transport when proxy mode is enabled', () => {
|
||||
@@ -78,7 +82,7 @@ test('AnkiIntegrationRuntime starts proxy transport when proxy mode is enabled',
|
||||
|
||||
test('AnkiIntegrationRuntime switches transports and clears known words when runtime patch disables highlighting', () => {
|
||||
const { runtime, calls } = createRuntime({
|
||||
nPlusOne: {
|
||||
knownWords: {
|
||||
highlightEnabled: true,
|
||||
},
|
||||
pollingRate: 250,
|
||||
@@ -88,7 +92,7 @@ test('AnkiIntegrationRuntime switches transports and clears known words when run
|
||||
calls.length = 0;
|
||||
|
||||
runtime.applyRuntimeConfigPatch({
|
||||
nPlusOne: {
|
||||
knownWords: {
|
||||
highlightEnabled: false,
|
||||
},
|
||||
proxy: {
|
||||
@@ -106,3 +110,77 @@ test('AnkiIntegrationRuntime switches transports and clears known words when run
|
||||
'proxy:start:127.0.0.1:8766:http://127.0.0.1:8765',
|
||||
]);
|
||||
});
|
||||
|
||||
test('AnkiIntegrationRuntime skips known-word lifecycle restart for unrelated runtime patches', () => {
|
||||
const { runtime, calls } = createRuntime({
|
||||
knownWords: {
|
||||
highlightEnabled: true,
|
||||
},
|
||||
pollingRate: 250,
|
||||
});
|
||||
|
||||
runtime.start();
|
||||
calls.length = 0;
|
||||
|
||||
runtime.applyRuntimeConfigPatch({
|
||||
behavior: {
|
||||
autoUpdateNewCards: false,
|
||||
},
|
||||
});
|
||||
|
||||
assert.deepEqual(calls, []);
|
||||
});
|
||||
|
||||
test('AnkiIntegrationRuntime restarts known-word lifecycle when known-word settings change', () => {
|
||||
const { runtime, calls } = createRuntime({
|
||||
knownWords: {
|
||||
highlightEnabled: true,
|
||||
refreshMinutes: 90,
|
||||
},
|
||||
pollingRate: 250,
|
||||
});
|
||||
|
||||
runtime.start();
|
||||
calls.length = 0;
|
||||
|
||||
runtime.applyRuntimeConfigPatch({
|
||||
knownWords: {
|
||||
refreshMinutes: 120,
|
||||
},
|
||||
});
|
||||
|
||||
assert.deepEqual(calls, ['known:start']);
|
||||
});
|
||||
|
||||
test('AnkiIntegrationRuntime does not stop lifecycle when disabled while runtime is stopped', () => {
|
||||
const { runtime, calls } = createRuntime({
|
||||
knownWords: {
|
||||
highlightEnabled: true,
|
||||
},
|
||||
});
|
||||
|
||||
runtime.applyRuntimeConfigPatch({
|
||||
knownWords: {
|
||||
highlightEnabled: false,
|
||||
},
|
||||
});
|
||||
|
||||
assert.deepEqual(calls, ['known:clear']);
|
||||
});
|
||||
|
||||
test('AnkiIntegrationRuntime does not restart known-word lifecycle for config changes while stopped', () => {
|
||||
const { runtime, calls } = createRuntime({
|
||||
knownWords: {
|
||||
highlightEnabled: true,
|
||||
refreshMinutes: 90,
|
||||
},
|
||||
});
|
||||
|
||||
runtime.applyRuntimeConfigPatch({
|
||||
knownWords: {
|
||||
refreshMinutes: 120,
|
||||
},
|
||||
});
|
||||
|
||||
assert.deepEqual(calls, []);
|
||||
});
|
||||
|
||||
@@ -1,5 +1,10 @@
|
||||
import { DEFAULT_ANKI_CONNECT_CONFIG } from '../config';
|
||||
import type { AnkiConnectConfig } from '../types';
|
||||
import {
|
||||
getKnownWordCacheLifecycleConfig,
|
||||
getKnownWordCacheRefreshIntervalMinutes,
|
||||
getKnownWordCacheScopeForConfig,
|
||||
} from './known-word-cache';
|
||||
|
||||
export interface AnkiIntegrationRuntimeProxyServer {
|
||||
start(options: { host: string; port: number; upstreamUrl: string }): void;
|
||||
@@ -86,6 +91,14 @@ export function normalizeAnkiIntegrationConfig(config: AnkiConnectConfig): AnkiC
|
||||
...DEFAULT_ANKI_CONNECT_CONFIG.media,
|
||||
...(config.media ?? {}),
|
||||
},
|
||||
knownWords: {
|
||||
...DEFAULT_ANKI_CONNECT_CONFIG.knownWords,
|
||||
...(config.knownWords ?? {}),
|
||||
},
|
||||
nPlusOne: {
|
||||
...DEFAULT_ANKI_CONNECT_CONFIG.nPlusOne,
|
||||
...(config.nPlusOne ?? {}),
|
||||
},
|
||||
behavior: {
|
||||
...DEFAULT_ANKI_CONNECT_CONFIG.behavior,
|
||||
...(config.behavior ?? {}),
|
||||
@@ -136,12 +149,22 @@ export class AnkiIntegrationRuntime {
|
||||
}
|
||||
|
||||
applyRuntimeConfigPatch(patch: Partial<AnkiConnectConfig>): void {
|
||||
const wasKnownWordCacheEnabled = this.config.nPlusOne?.highlightEnabled === true;
|
||||
const wasKnownWordCacheEnabled = this.config.knownWords?.highlightEnabled === true;
|
||||
const previousKnownWordCacheConfig = wasKnownWordCacheEnabled
|
||||
? this.getKnownWordCacheLifecycleConfig(this.config)
|
||||
: null;
|
||||
const previousTransportKey = this.getTransportConfigKey(this.config);
|
||||
|
||||
const mergedConfig: AnkiConnectConfig = {
|
||||
...this.config,
|
||||
...patch,
|
||||
knownWords:
|
||||
patch.knownWords !== undefined
|
||||
? {
|
||||
...(this.config.knownWords ?? DEFAULT_ANKI_CONNECT_CONFIG.knownWords),
|
||||
...patch.knownWords,
|
||||
}
|
||||
: this.config.knownWords,
|
||||
nPlusOne:
|
||||
patch.nPlusOne !== undefined
|
||||
? {
|
||||
@@ -176,11 +199,22 @@ export class AnkiIntegrationRuntime {
|
||||
};
|
||||
this.config = normalizeAnkiIntegrationConfig(mergedConfig);
|
||||
this.deps.onConfigChanged?.(this.config);
|
||||
const nextKnownWordCacheEnabled = this.config.knownWords?.highlightEnabled === true;
|
||||
|
||||
if (wasKnownWordCacheEnabled && this.config.nPlusOne?.highlightEnabled === false) {
|
||||
this.deps.knownWordCache.stopLifecycle();
|
||||
if (wasKnownWordCacheEnabled && !nextKnownWordCacheEnabled) {
|
||||
if (this.started) {
|
||||
this.deps.knownWordCache.stopLifecycle();
|
||||
}
|
||||
this.deps.knownWordCache.clearKnownWordCacheState();
|
||||
} else {
|
||||
} else if (this.started && !wasKnownWordCacheEnabled && nextKnownWordCacheEnabled) {
|
||||
this.deps.knownWordCache.startLifecycle();
|
||||
} else if (
|
||||
this.started &&
|
||||
wasKnownWordCacheEnabled &&
|
||||
nextKnownWordCacheEnabled &&
|
||||
previousKnownWordCacheConfig !== null &&
|
||||
previousKnownWordCacheConfig !== this.getKnownWordCacheLifecycleConfig(this.config)
|
||||
) {
|
||||
this.deps.knownWordCache.startLifecycle();
|
||||
}
|
||||
|
||||
@@ -191,6 +225,18 @@ export class AnkiIntegrationRuntime {
|
||||
}
|
||||
}
|
||||
|
||||
private getKnownWordCacheLifecycleConfig(config: AnkiConnectConfig): string {
|
||||
return getKnownWordCacheLifecycleConfig(config);
|
||||
}
|
||||
|
||||
private getKnownWordRefreshIntervalMinutes(config: AnkiConnectConfig): number {
|
||||
return getKnownWordCacheRefreshIntervalMinutes(config);
|
||||
}
|
||||
|
||||
private getKnownWordCacheScopeForConfig(config: AnkiConnectConfig): string {
|
||||
return getKnownWordCacheScopeForConfig(config);
|
||||
}
|
||||
|
||||
getOrCreateProxyServer(): AnkiIntegrationRuntimeProxyServer {
|
||||
if (!this.proxyServer) {
|
||||
this.proxyServer = this.deps.proxyServerFactory();
|
||||
|
||||
67
src/anki-integration/ui-feedback.test.ts
Normal file
67
src/anki-integration/ui-feedback.test.ts
Normal file
@@ -0,0 +1,67 @@
|
||||
import test from 'node:test';
|
||||
import assert from 'node:assert/strict';
|
||||
import {
|
||||
beginUpdateProgress,
|
||||
createUiFeedbackState,
|
||||
showProgressTick,
|
||||
showUpdateResult,
|
||||
} from './ui-feedback';
|
||||
|
||||
test('showUpdateResult stops spinner before success notification and suppresses stale ticks', () => {
|
||||
const state = createUiFeedbackState();
|
||||
const osdMessages: string[] = [];
|
||||
|
||||
beginUpdateProgress(state, 'Creating sentence card', () => {
|
||||
showProgressTick(state, (text) => {
|
||||
osdMessages.push(text);
|
||||
});
|
||||
});
|
||||
|
||||
showUpdateResult(
|
||||
state,
|
||||
{
|
||||
clearProgressTimer: (timer) => {
|
||||
clearInterval(timer);
|
||||
},
|
||||
showOsdNotification: (text) => {
|
||||
osdMessages.push(text);
|
||||
},
|
||||
},
|
||||
{ success: true, message: 'Updated card: taberu' },
|
||||
);
|
||||
|
||||
showProgressTick(state, (text) => {
|
||||
osdMessages.push(text);
|
||||
});
|
||||
|
||||
assert.deepEqual(osdMessages, ['Creating sentence card |', '✓ Updated card: taberu']);
|
||||
});
|
||||
|
||||
test('showUpdateResult renders failed updates with an x marker', () => {
|
||||
const state = createUiFeedbackState();
|
||||
const osdMessages: string[] = [];
|
||||
|
||||
beginUpdateProgress(state, 'Creating sentence card', () => {
|
||||
showProgressTick(state, (text) => {
|
||||
osdMessages.push(text);
|
||||
});
|
||||
});
|
||||
|
||||
showUpdateResult(
|
||||
state,
|
||||
{
|
||||
clearProgressTimer: (timer) => {
|
||||
clearInterval(timer);
|
||||
},
|
||||
showOsdNotification: (text) => {
|
||||
osdMessages.push(text);
|
||||
},
|
||||
},
|
||||
{ success: false, message: 'Sentence card failed: deck missing' },
|
||||
);
|
||||
|
||||
assert.deepEqual(osdMessages, [
|
||||
'Creating sentence card |',
|
||||
'x Sentence card failed: deck missing',
|
||||
]);
|
||||
});
|
||||
@@ -7,6 +7,11 @@ export interface UiFeedbackState {
|
||||
progressFrame: number;
|
||||
}
|
||||
|
||||
export interface UiFeedbackResult {
|
||||
success: boolean;
|
||||
message: string;
|
||||
}
|
||||
|
||||
export interface UiFeedbackNotificationContext {
|
||||
getNotificationType: () => string | undefined;
|
||||
showOsd: (text: string) => void;
|
||||
@@ -66,6 +71,15 @@ export function endUpdateProgress(
|
||||
state.progressDepth = Math.max(0, state.progressDepth - 1);
|
||||
if (state.progressDepth > 0) return;
|
||||
|
||||
clearUpdateProgress(state, clearProgressTimer);
|
||||
}
|
||||
|
||||
export function clearUpdateProgress(
|
||||
state: UiFeedbackState,
|
||||
clearProgressTimer: (timer: ReturnType<typeof setInterval>) => void,
|
||||
): void {
|
||||
state.progressDepth = 0;
|
||||
|
||||
if (state.progressTimer) {
|
||||
clearProgressTimer(state.progressTimer);
|
||||
state.progressTimer = null;
|
||||
@@ -85,6 +99,19 @@ export function showProgressTick(
|
||||
showOsdNotification(`${state.progressMessage} ${frame}`);
|
||||
}
|
||||
|
||||
export function showUpdateResult(
|
||||
state: UiFeedbackState,
|
||||
options: {
|
||||
clearProgressTimer: (timer: ReturnType<typeof setInterval>) => void;
|
||||
showOsdNotification: (text: string) => void;
|
||||
},
|
||||
result: UiFeedbackResult,
|
||||
): void {
|
||||
clearUpdateProgress(state, options.clearProgressTimer);
|
||||
const prefix = result.success ? '✓' : 'x';
|
||||
options.showOsdNotification(`${prefix} ${result.message}`);
|
||||
}
|
||||
|
||||
export async function withUpdateProgress<T>(
|
||||
state: UiFeedbackState,
|
||||
options: UiFeedbackOptions,
|
||||
|
||||
@@ -2,6 +2,7 @@ import test from 'node:test';
|
||||
import assert from 'node:assert/strict';
|
||||
import {
|
||||
hasExplicitCommand,
|
||||
isHeadlessInitialCommand,
|
||||
parseArgs,
|
||||
shouldRunSettingsOnlyStartup,
|
||||
shouldStartApp,
|
||||
@@ -101,7 +102,8 @@ test('hasExplicitCommand and shouldStartApp preserve command intent', () => {
|
||||
const refreshKnownWords = parseArgs(['--refresh-known-words']);
|
||||
assert.equal(refreshKnownWords.help, false);
|
||||
assert.equal(hasExplicitCommand(refreshKnownWords), true);
|
||||
assert.equal(shouldStartApp(refreshKnownWords), false);
|
||||
assert.equal(shouldStartApp(refreshKnownWords), true);
|
||||
assert.equal(isHeadlessInitialCommand(refreshKnownWords), true);
|
||||
|
||||
const settings = parseArgs(['--settings']);
|
||||
assert.equal(settings.settings, true);
|
||||
@@ -143,6 +145,50 @@ test('hasExplicitCommand and shouldStartApp preserve command intent', () => {
|
||||
assert.equal(dictionaryTarget.dictionary, true);
|
||||
assert.equal(dictionaryTarget.dictionaryTarget, '/tmp/example.mkv');
|
||||
|
||||
const stats = parseArgs([
|
||||
'--stats',
|
||||
'--stats-response-path',
|
||||
'/tmp/subminer-stats-response.json',
|
||||
'--stats-cleanup-lifetime',
|
||||
]);
|
||||
assert.equal(stats.stats, true);
|
||||
assert.equal(stats.statsResponsePath, '/tmp/subminer-stats-response.json');
|
||||
assert.equal(stats.statsCleanup, false);
|
||||
assert.equal(stats.statsCleanupVocab, false);
|
||||
assert.equal(stats.statsCleanupLifetime, true);
|
||||
assert.equal(hasExplicitCommand(stats), true);
|
||||
assert.equal(shouldStartApp(stats), true);
|
||||
|
||||
const statsBackground = parseArgs(['--stats', '--stats-background']) as typeof stats & {
|
||||
statsBackground?: boolean;
|
||||
statsStop?: boolean;
|
||||
};
|
||||
assert.equal(statsBackground.stats, true);
|
||||
assert.equal(statsBackground.statsBackground, true);
|
||||
assert.equal(statsBackground.statsStop, false);
|
||||
assert.equal(hasExplicitCommand(statsBackground), true);
|
||||
assert.equal(shouldStartApp(statsBackground), true);
|
||||
|
||||
const statsStop = parseArgs(['--stats', '--stats-stop']) as typeof stats & {
|
||||
statsBackground?: boolean;
|
||||
statsStop?: boolean;
|
||||
};
|
||||
assert.equal(statsStop.stats, true);
|
||||
assert.equal(statsStop.statsStop, true);
|
||||
assert.equal(statsStop.statsBackground, false);
|
||||
assert.equal(hasExplicitCommand(statsStop), true);
|
||||
assert.equal(shouldStartApp(statsStop), true);
|
||||
|
||||
const statsLifetimeRebuild = parseArgs([
|
||||
'--stats',
|
||||
'--stats-cleanup',
|
||||
'--stats-cleanup-lifetime',
|
||||
]);
|
||||
assert.equal(statsLifetimeRebuild.stats, true);
|
||||
assert.equal(statsLifetimeRebuild.statsCleanup, true);
|
||||
assert.equal(statsLifetimeRebuild.statsCleanupLifetime, true);
|
||||
assert.equal(statsLifetimeRebuild.statsCleanupVocab, false);
|
||||
|
||||
const jellyfinLibraries = parseArgs(['--jellyfin-libraries']);
|
||||
assert.equal(jellyfinLibraries.jellyfinLibraries, true);
|
||||
assert.equal(hasExplicitCommand(jellyfinLibraries), true);
|
||||
|
||||
@@ -29,6 +29,13 @@ export interface CliArgs {
|
||||
anilistRetryQueue: boolean;
|
||||
dictionary: boolean;
|
||||
dictionaryTarget?: string;
|
||||
stats: boolean;
|
||||
statsBackground?: boolean;
|
||||
statsStop?: boolean;
|
||||
statsCleanup?: boolean;
|
||||
statsCleanupVocab?: boolean;
|
||||
statsCleanupLifetime?: boolean;
|
||||
statsResponsePath?: string;
|
||||
jellyfin: boolean;
|
||||
jellyfinLogin: boolean;
|
||||
jellyfinLogout: boolean;
|
||||
@@ -97,6 +104,12 @@ export function parseArgs(argv: string[]): CliArgs {
|
||||
anilistSetup: false,
|
||||
anilistRetryQueue: false,
|
||||
dictionary: false,
|
||||
stats: false,
|
||||
statsBackground: false,
|
||||
statsStop: false,
|
||||
statsCleanup: false,
|
||||
statsCleanupVocab: false,
|
||||
statsCleanupLifetime: false,
|
||||
jellyfin: false,
|
||||
jellyfinLogin: false,
|
||||
jellyfinLogout: false,
|
||||
@@ -162,6 +175,22 @@ export function parseArgs(argv: string[]): CliArgs {
|
||||
} else if (arg === '--dictionary-target') {
|
||||
const value = readValue(argv[i + 1]);
|
||||
if (value) args.dictionaryTarget = value;
|
||||
} else if (arg === '--stats') args.stats = true;
|
||||
else if (arg === '--stats-background') {
|
||||
args.stats = true;
|
||||
args.statsBackground = true;
|
||||
} else if (arg === '--stats-stop') {
|
||||
args.stats = true;
|
||||
args.statsStop = true;
|
||||
} else if (arg === '--stats-cleanup') args.statsCleanup = true;
|
||||
else if (arg === '--stats-cleanup-vocab') args.statsCleanupVocab = true;
|
||||
else if (arg === '--stats-cleanup-lifetime') args.statsCleanupLifetime = true;
|
||||
else if (arg.startsWith('--stats-response-path=')) {
|
||||
const value = arg.split('=', 2)[1];
|
||||
if (value) args.statsResponsePath = value;
|
||||
} else if (arg === '--stats-response-path') {
|
||||
const value = readValue(argv[i + 1]);
|
||||
if (value) args.statsResponsePath = value;
|
||||
} else if (arg === '--jellyfin') args.jellyfin = true;
|
||||
else if (arg === '--jellyfin-login') args.jellyfinLogin = true;
|
||||
else if (arg === '--jellyfin-logout') args.jellyfinLogout = true;
|
||||
@@ -331,6 +360,7 @@ export function hasExplicitCommand(args: CliArgs): boolean {
|
||||
args.anilistSetup ||
|
||||
args.anilistRetryQueue ||
|
||||
args.dictionary ||
|
||||
args.stats ||
|
||||
args.jellyfin ||
|
||||
args.jellyfinLogin ||
|
||||
args.jellyfinLogout ||
|
||||
@@ -346,6 +376,10 @@ export function hasExplicitCommand(args: CliArgs): boolean {
|
||||
);
|
||||
}
|
||||
|
||||
export function isHeadlessInitialCommand(args: CliArgs): boolean {
|
||||
return args.refreshKnownWords;
|
||||
}
|
||||
|
||||
export function shouldStartApp(args: CliArgs): boolean {
|
||||
if (args.stop && !args.start) return false;
|
||||
if (
|
||||
@@ -361,12 +395,14 @@ export function shouldStartApp(args: CliArgs): boolean {
|
||||
args.mineSentence ||
|
||||
args.mineSentenceMultiple ||
|
||||
args.updateLastCardFromClipboard ||
|
||||
args.refreshKnownWords ||
|
||||
args.toggleSecondarySub ||
|
||||
args.triggerFieldGrouping ||
|
||||
args.triggerSubsync ||
|
||||
args.markAudioCard ||
|
||||
args.openRuntimeOptions ||
|
||||
args.dictionary ||
|
||||
args.stats ||
|
||||
args.jellyfin ||
|
||||
args.jellyfinPlay ||
|
||||
args.texthooker
|
||||
@@ -408,6 +444,7 @@ export function shouldRunSettingsOnlyStartup(args: CliArgs): boolean {
|
||||
!args.anilistSetup &&
|
||||
!args.anilistRetryQueue &&
|
||||
!args.dictionary &&
|
||||
!args.stats &&
|
||||
!args.jellyfin &&
|
||||
!args.jellyfinLogin &&
|
||||
!args.jellyfinLogout &&
|
||||
|
||||
@@ -18,7 +18,8 @@ test('printHelp includes configured texthooker port', () => {
|
||||
assert.match(output, /--help\s+Show this help/);
|
||||
assert.match(output, /default: 7777/);
|
||||
assert.match(output, /--launch-mpv/);
|
||||
assert.match(output, /--refresh-known-words/);
|
||||
assert.match(output, /--stats\s+Open the stats dashboard in your browser/);
|
||||
assert.doesNotMatch(output, /--refresh-known-words/);
|
||||
assert.match(output, /--setup\s+Open first-run setup window/);
|
||||
assert.match(output, /--anilist-status/);
|
||||
assert.match(output, /--anilist-retry-queue/);
|
||||
|
||||
@@ -14,6 +14,7 @@ ${B}Session${R}
|
||||
--start Connect to mpv and launch overlay
|
||||
--launch-mpv ${D}[targets...]${R} Launch mpv with the SubMiner mpv profile and exit
|
||||
--stop Stop the running instance
|
||||
--stats Open the stats dashboard in your browser
|
||||
--texthooker Start texthooker server only ${D}(no overlay)${R}
|
||||
|
||||
${B}Overlay${R}
|
||||
@@ -34,7 +35,6 @@ ${B}Mining${R}
|
||||
--trigger-field-grouping Run Kiku field grouping
|
||||
--trigger-subsync Run subtitle sync
|
||||
--toggle-secondary-sub Cycle secondary subtitle mode
|
||||
--refresh-known-words Refresh known words cache
|
||||
--open-runtime-options Open runtime options palette
|
||||
|
||||
${B}AniList${R}
|
||||
|
||||
@@ -85,11 +85,17 @@ test('loads defaults when config is missing', () => {
|
||||
assert.equal(config.immersionTracking.queueCap, 1000);
|
||||
assert.equal(config.immersionTracking.payloadCapBytes, 256);
|
||||
assert.equal(config.immersionTracking.maintenanceIntervalMs, 86_400_000);
|
||||
assert.equal(config.immersionTracking.retention.eventsDays, 7);
|
||||
assert.equal(config.immersionTracking.retention.telemetryDays, 30);
|
||||
assert.equal(config.immersionTracking.retention.dailyRollupsDays, 365);
|
||||
assert.equal(config.immersionTracking.retention.monthlyRollupsDays, 1825);
|
||||
assert.equal(config.immersionTracking.retention.vacuumIntervalDays, 7);
|
||||
assert.equal(config.immersionTracking.retention.eventsDays, 0);
|
||||
assert.equal(config.immersionTracking.retention.telemetryDays, 0);
|
||||
assert.equal(config.immersionTracking.retention.sessionsDays, 0);
|
||||
assert.equal(config.immersionTracking.retention.dailyRollupsDays, 0);
|
||||
assert.equal(config.immersionTracking.retention.monthlyRollupsDays, 0);
|
||||
assert.equal(config.immersionTracking.retention.vacuumIntervalDays, 0);
|
||||
assert.equal(config.immersionTracking.retentionMode, 'preset');
|
||||
assert.equal(config.immersionTracking.retentionPreset, 'balanced');
|
||||
assert.equal(config.immersionTracking.lifetimeSummaries?.global, true);
|
||||
assert.equal(config.immersionTracking.lifetimeSummaries?.anime, true);
|
||||
assert.equal(config.immersionTracking.lifetimeSummaries?.media, true);
|
||||
});
|
||||
|
||||
test('throws actionable startup parse error for malformed config at construction time', () => {
|
||||
@@ -742,12 +748,20 @@ test('accepts immersion tracking config values', () => {
|
||||
"queueCap": 2000,
|
||||
"payloadCapBytes": 512,
|
||||
"maintenanceIntervalMs": 3600000,
|
||||
"retentionMode": "preset",
|
||||
"retentionPreset": "minimal",
|
||||
"retention": {
|
||||
"eventsDays": 14,
|
||||
"telemetryDays": 45,
|
||||
"sessionsDays": 60,
|
||||
"dailyRollupsDays": 730,
|
||||
"monthlyRollupsDays": 3650,
|
||||
"vacuumIntervalDays": 14
|
||||
},
|
||||
"lifetimeSummaries": {
|
||||
"global": false,
|
||||
"anime": true,
|
||||
"media": false
|
||||
}
|
||||
}
|
||||
}`,
|
||||
@@ -766,9 +780,15 @@ test('accepts immersion tracking config values', () => {
|
||||
assert.equal(config.immersionTracking.maintenanceIntervalMs, 3_600_000);
|
||||
assert.equal(config.immersionTracking.retention.eventsDays, 14);
|
||||
assert.equal(config.immersionTracking.retention.telemetryDays, 45);
|
||||
assert.equal(config.immersionTracking.retention.sessionsDays, 60);
|
||||
assert.equal(config.immersionTracking.retention.dailyRollupsDays, 730);
|
||||
assert.equal(config.immersionTracking.retention.monthlyRollupsDays, 3650);
|
||||
assert.equal(config.immersionTracking.retention.vacuumIntervalDays, 14);
|
||||
assert.equal(config.immersionTracking.retentionMode, 'preset');
|
||||
assert.equal(config.immersionTracking.retentionPreset, 'minimal');
|
||||
assert.equal(config.immersionTracking.lifetimeSummaries?.global, false);
|
||||
assert.equal(config.immersionTracking.lifetimeSummaries?.anime, true);
|
||||
assert.equal(config.immersionTracking.lifetimeSummaries?.media, false);
|
||||
});
|
||||
|
||||
test('falls back for invalid immersion tracking tuning values', () => {
|
||||
@@ -777,18 +797,22 @@ test('falls back for invalid immersion tracking tuning values', () => {
|
||||
path.join(dir, 'config.jsonc'),
|
||||
`{
|
||||
"immersionTracking": {
|
||||
"retentionMode": "bad",
|
||||
"retentionPreset": "bad",
|
||||
"batchSize": 0,
|
||||
"flushIntervalMs": 1,
|
||||
"queueCap": 5,
|
||||
"payloadCapBytes": 16,
|
||||
"maintenanceIntervalMs": 1000,
|
||||
"retention": {
|
||||
"eventsDays": 0,
|
||||
"eventsDays": -1,
|
||||
"telemetryDays": 99999,
|
||||
"dailyRollupsDays": 0,
|
||||
"sessionsDays": -1,
|
||||
"dailyRollupsDays": -1,
|
||||
"monthlyRollupsDays": 999999,
|
||||
"vacuumIntervalDays": 0
|
||||
}
|
||||
"vacuumIntervalDays": -1
|
||||
},
|
||||
"lifetimeSummaries": "bad"
|
||||
}
|
||||
}`,
|
||||
'utf-8',
|
||||
@@ -803,11 +827,17 @@ test('falls back for invalid immersion tracking tuning values', () => {
|
||||
assert.equal(config.immersionTracking.queueCap, 1000);
|
||||
assert.equal(config.immersionTracking.payloadCapBytes, 256);
|
||||
assert.equal(config.immersionTracking.maintenanceIntervalMs, 86_400_000);
|
||||
assert.equal(config.immersionTracking.retention.eventsDays, 7);
|
||||
assert.equal(config.immersionTracking.retention.telemetryDays, 30);
|
||||
assert.equal(config.immersionTracking.retention.dailyRollupsDays, 365);
|
||||
assert.equal(config.immersionTracking.retention.monthlyRollupsDays, 1825);
|
||||
assert.equal(config.immersionTracking.retention.vacuumIntervalDays, 7);
|
||||
assert.equal(config.immersionTracking.retention.eventsDays, 0);
|
||||
assert.equal(config.immersionTracking.retention.telemetryDays, 0);
|
||||
assert.equal(config.immersionTracking.retention.sessionsDays, 0);
|
||||
assert.equal(config.immersionTracking.retention.dailyRollupsDays, 0);
|
||||
assert.equal(config.immersionTracking.retention.monthlyRollupsDays, 0);
|
||||
assert.equal(config.immersionTracking.retention.vacuumIntervalDays, 0);
|
||||
assert.equal(config.immersionTracking.retentionMode, 'preset');
|
||||
assert.equal(config.immersionTracking.retentionPreset, 'balanced');
|
||||
assert.equal(config.immersionTracking.lifetimeSummaries?.global, true);
|
||||
assert.equal(config.immersionTracking.lifetimeSummaries?.anime, true);
|
||||
assert.equal(config.immersionTracking.lifetimeSummaries?.media, true);
|
||||
|
||||
assert.ok(warnings.some((warning) => warning.path === 'immersionTracking.batchSize'));
|
||||
assert.ok(warnings.some((warning) => warning.path === 'immersionTracking.flushIntervalMs'));
|
||||
@@ -818,6 +848,9 @@ test('falls back for invalid immersion tracking tuning values', () => {
|
||||
assert.ok(
|
||||
warnings.some((warning) => warning.path === 'immersionTracking.retention.telemetryDays'),
|
||||
);
|
||||
assert.ok(
|
||||
warnings.some((warning) => warning.path === 'immersionTracking.retention.sessionsDays'),
|
||||
);
|
||||
assert.ok(
|
||||
warnings.some((warning) => warning.path === 'immersionTracking.retention.dailyRollupsDays'),
|
||||
);
|
||||
@@ -827,6 +860,37 @@ test('falls back for invalid immersion tracking tuning values', () => {
|
||||
assert.ok(
|
||||
warnings.some((warning) => warning.path === 'immersionTracking.retention.vacuumIntervalDays'),
|
||||
);
|
||||
assert.ok(warnings.some((warning) => warning.path === 'immersionTracking.retentionMode'));
|
||||
assert.ok(warnings.some((warning) => warning.path === 'immersionTracking.retentionPreset'));
|
||||
assert.ok(warnings.some((warning) => warning.path === 'immersionTracking.lifetimeSummaries'));
|
||||
});
|
||||
|
||||
test('applies retention presets and explicit overrides', () => {
|
||||
const dir = makeTempDir();
|
||||
fs.writeFileSync(
|
||||
path.join(dir, 'config.jsonc'),
|
||||
`{
|
||||
"immersionTracking": {
|
||||
"retentionMode": "preset",
|
||||
"retentionPreset": "minimal",
|
||||
"retention": {
|
||||
"eventsDays": 11,
|
||||
"sessionsDays": 8
|
||||
}
|
||||
}
|
||||
}`,
|
||||
'utf-8',
|
||||
);
|
||||
|
||||
const service = new ConfigService(dir);
|
||||
const config = service.getConfig();
|
||||
|
||||
assert.equal(config.immersionTracking.retentionMode, 'preset');
|
||||
assert.equal(config.immersionTracking.retentionPreset, 'minimal');
|
||||
assert.equal(config.immersionTracking.retention.eventsDays, 11);
|
||||
assert.equal(config.immersionTracking.retention.sessionsDays, 8);
|
||||
assert.equal(config.immersionTracking.retention.telemetryDays, 14);
|
||||
assert.equal(config.immersionTracking.retention.dailyRollupsDays, 30);
|
||||
});
|
||||
|
||||
test('parses jsonc and warns/falls back on invalid value', () => {
|
||||
@@ -1363,15 +1427,16 @@ test('runtime options registry is centralized', () => {
|
||||
]);
|
||||
});
|
||||
|
||||
test('validates ankiConnect n+1 behavior values', () => {
|
||||
test('validates ankiConnect knownWords behavior values', () => {
|
||||
const dir = makeTempDir();
|
||||
fs.writeFileSync(
|
||||
path.join(dir, 'config.jsonc'),
|
||||
`{
|
||||
"ankiConnect": {
|
||||
"nPlusOne": {
|
||||
"knownWords": {
|
||||
"highlightEnabled": "yes",
|
||||
"refreshMinutes": -5
|
||||
"refreshMinutes": -5,
|
||||
"addMinedWordsImmediately": "no"
|
||||
}
|
||||
}
|
||||
}`,
|
||||
@@ -1383,26 +1448,34 @@ test('validates ankiConnect n+1 behavior values', () => {
|
||||
const warnings = service.getWarnings();
|
||||
|
||||
assert.equal(
|
||||
config.ankiConnect.nPlusOne.highlightEnabled,
|
||||
DEFAULT_CONFIG.ankiConnect.nPlusOne.highlightEnabled,
|
||||
config.ankiConnect.knownWords.highlightEnabled,
|
||||
DEFAULT_CONFIG.ankiConnect.knownWords.highlightEnabled,
|
||||
);
|
||||
assert.equal(
|
||||
config.ankiConnect.nPlusOne.refreshMinutes,
|
||||
DEFAULT_CONFIG.ankiConnect.nPlusOne.refreshMinutes,
|
||||
config.ankiConnect.knownWords.refreshMinutes,
|
||||
DEFAULT_CONFIG.ankiConnect.knownWords.refreshMinutes,
|
||||
);
|
||||
assert.ok(warnings.some((warning) => warning.path === 'ankiConnect.knownWords.highlightEnabled'));
|
||||
assert.ok(warnings.some((warning) => warning.path === 'ankiConnect.knownWords.refreshMinutes'));
|
||||
assert.equal(
|
||||
config.ankiConnect.knownWords.addMinedWordsImmediately,
|
||||
DEFAULT_CONFIG.ankiConnect.knownWords.addMinedWordsImmediately,
|
||||
);
|
||||
assert.ok(
|
||||
warnings.some((warning) => warning.path === 'ankiConnect.knownWords.addMinedWordsImmediately'),
|
||||
);
|
||||
assert.ok(warnings.some((warning) => warning.path === 'ankiConnect.nPlusOne.highlightEnabled'));
|
||||
assert.ok(warnings.some((warning) => warning.path === 'ankiConnect.nPlusOne.refreshMinutes'));
|
||||
});
|
||||
|
||||
test('accepts valid ankiConnect n+1 behavior values', () => {
|
||||
test('accepts valid ankiConnect knownWords behavior values', () => {
|
||||
const dir = makeTempDir();
|
||||
fs.writeFileSync(
|
||||
path.join(dir, 'config.jsonc'),
|
||||
`{
|
||||
"ankiConnect": {
|
||||
"nPlusOne": {
|
||||
"knownWords": {
|
||||
"highlightEnabled": true,
|
||||
"refreshMinutes": 120
|
||||
"refreshMinutes": 120,
|
||||
"addMinedWordsImmediately": false
|
||||
}
|
||||
}
|
||||
}`,
|
||||
@@ -1412,8 +1485,9 @@ test('accepts valid ankiConnect n+1 behavior values', () => {
|
||||
const service = new ConfigService(dir);
|
||||
const config = service.getConfig();
|
||||
|
||||
assert.equal(config.ankiConnect.nPlusOne.highlightEnabled, true);
|
||||
assert.equal(config.ankiConnect.nPlusOne.refreshMinutes, 120);
|
||||
assert.equal(config.ankiConnect.knownWords.highlightEnabled, true);
|
||||
assert.equal(config.ankiConnect.knownWords.refreshMinutes, 120);
|
||||
assert.equal(config.ankiConnect.knownWords.addMinedWordsImmediately, false);
|
||||
});
|
||||
|
||||
test('validates ankiConnect n+1 minimum sentence word count', () => {
|
||||
@@ -1461,13 +1535,13 @@ test('accepts valid ankiConnect n+1 minimum sentence word count', () => {
|
||||
assert.equal(config.ankiConnect.nPlusOne.minSentenceWords, 4);
|
||||
});
|
||||
|
||||
test('validates ankiConnect n+1 match mode values', () => {
|
||||
test('validates ankiConnect knownWords match mode values', () => {
|
||||
const dir = makeTempDir();
|
||||
fs.writeFileSync(
|
||||
path.join(dir, 'config.jsonc'),
|
||||
`{
|
||||
"ankiConnect": {
|
||||
"nPlusOne": {
|
||||
"knownWords": {
|
||||
"matchMode": "bad-mode"
|
||||
}
|
||||
}
|
||||
@@ -1480,19 +1554,19 @@ test('validates ankiConnect n+1 match mode values', () => {
|
||||
const warnings = service.getWarnings();
|
||||
|
||||
assert.equal(
|
||||
config.ankiConnect.nPlusOne.matchMode,
|
||||
DEFAULT_CONFIG.ankiConnect.nPlusOne.matchMode,
|
||||
config.ankiConnect.knownWords.matchMode,
|
||||
DEFAULT_CONFIG.ankiConnect.knownWords.matchMode,
|
||||
);
|
||||
assert.ok(warnings.some((warning) => warning.path === 'ankiConnect.nPlusOne.matchMode'));
|
||||
assert.ok(warnings.some((warning) => warning.path === 'ankiConnect.knownWords.matchMode'));
|
||||
});
|
||||
|
||||
test('accepts valid ankiConnect n+1 match mode values', () => {
|
||||
test('accepts valid ankiConnect knownWords match mode values', () => {
|
||||
const dir = makeTempDir();
|
||||
fs.writeFileSync(
|
||||
path.join(dir, 'config.jsonc'),
|
||||
`{
|
||||
"ankiConnect": {
|
||||
"nPlusOne": {
|
||||
"knownWords": {
|
||||
"matchMode": "surface"
|
||||
}
|
||||
}
|
||||
@@ -1503,18 +1577,20 @@ test('accepts valid ankiConnect n+1 match mode values', () => {
|
||||
const service = new ConfigService(dir);
|
||||
const config = service.getConfig();
|
||||
|
||||
assert.equal(config.ankiConnect.nPlusOne.matchMode, 'surface');
|
||||
assert.equal(config.ankiConnect.knownWords.matchMode, 'surface');
|
||||
});
|
||||
|
||||
test('validates ankiConnect n+1 color values', () => {
|
||||
test('validates ankiConnect knownWords and n+1 color values', () => {
|
||||
const dir = makeTempDir();
|
||||
fs.writeFileSync(
|
||||
path.join(dir, 'config.jsonc'),
|
||||
`{
|
||||
"ankiConnect": {
|
||||
"nPlusOne": {
|
||||
"nPlusOne": "not-a-color",
|
||||
"knownWord": 123
|
||||
"nPlusOne": "not-a-color"
|
||||
},
|
||||
"knownWords": {
|
||||
"color": 123
|
||||
}
|
||||
}
|
||||
}`,
|
||||
@@ -1526,23 +1602,22 @@ test('validates ankiConnect n+1 color values', () => {
|
||||
const warnings = service.getWarnings();
|
||||
|
||||
assert.equal(config.ankiConnect.nPlusOne.nPlusOne, DEFAULT_CONFIG.ankiConnect.nPlusOne.nPlusOne);
|
||||
assert.equal(
|
||||
config.ankiConnect.nPlusOne.knownWord,
|
||||
DEFAULT_CONFIG.ankiConnect.nPlusOne.knownWord,
|
||||
);
|
||||
assert.equal(config.ankiConnect.knownWords.color, DEFAULT_CONFIG.ankiConnect.knownWords.color);
|
||||
assert.ok(warnings.some((warning) => warning.path === 'ankiConnect.nPlusOne.nPlusOne'));
|
||||
assert.ok(warnings.some((warning) => warning.path === 'ankiConnect.nPlusOne.knownWord'));
|
||||
assert.ok(warnings.some((warning) => warning.path === 'ankiConnect.knownWords.color'));
|
||||
});
|
||||
|
||||
test('accepts valid ankiConnect n+1 color values', () => {
|
||||
test('accepts valid ankiConnect knownWords and n+1 color values', () => {
|
||||
const dir = makeTempDir();
|
||||
fs.writeFileSync(
|
||||
path.join(dir, 'config.jsonc'),
|
||||
`{
|
||||
"ankiConnect": {
|
||||
"nPlusOne": {
|
||||
"nPlusOne": "#c6a0f6",
|
||||
"knownWord": "#a6da95"
|
||||
"nPlusOne": "#c6a0f6"
|
||||
},
|
||||
"knownWords": {
|
||||
"color": "#a6da95"
|
||||
}
|
||||
}
|
||||
}`,
|
||||
@@ -1553,7 +1628,49 @@ test('accepts valid ankiConnect n+1 color values', () => {
|
||||
const config = service.getConfig();
|
||||
|
||||
assert.equal(config.ankiConnect.nPlusOne.nPlusOne, '#c6a0f6');
|
||||
assert.equal(config.ankiConnect.nPlusOne.knownWord, '#a6da95');
|
||||
assert.equal(config.ankiConnect.knownWords.color, '#a6da95');
|
||||
});
|
||||
|
||||
test('supports legacy ankiConnect nPlusOne known-word settings as fallback', () => {
|
||||
const dir = makeTempDir();
|
||||
fs.writeFileSync(
|
||||
path.join(dir, 'config.jsonc'),
|
||||
`{
|
||||
"ankiConnect": {
|
||||
"nPlusOne": {
|
||||
"highlightEnabled": true,
|
||||
"refreshMinutes": 90,
|
||||
"matchMode": "surface",
|
||||
"decks": ["Mining", "Kaishi 1.5k"],
|
||||
"knownWord": "#a6da95"
|
||||
}
|
||||
}
|
||||
}`,
|
||||
'utf-8',
|
||||
);
|
||||
|
||||
const service = new ConfigService(dir);
|
||||
const config = service.getConfig();
|
||||
const warnings = service.getWarnings();
|
||||
|
||||
assert.equal(config.ankiConnect.knownWords.highlightEnabled, true);
|
||||
assert.equal(config.ankiConnect.knownWords.refreshMinutes, 90);
|
||||
assert.equal(config.ankiConnect.knownWords.matchMode, 'surface');
|
||||
assert.deepEqual(config.ankiConnect.knownWords.decks, {
|
||||
Mining: ['Expression', 'Word', 'Reading', 'Word Reading'],
|
||||
'Kaishi 1.5k': ['Expression', 'Word', 'Reading', 'Word Reading'],
|
||||
});
|
||||
assert.equal(config.ankiConnect.knownWords.color, '#a6da95');
|
||||
assert.ok(
|
||||
warnings.some(
|
||||
(warning) =>
|
||||
warning.path === 'ankiConnect.nPlusOne.highlightEnabled' ||
|
||||
warning.path === 'ankiConnect.nPlusOne.refreshMinutes' ||
|
||||
warning.path === 'ankiConnect.nPlusOne.matchMode' ||
|
||||
warning.path === 'ankiConnect.nPlusOne.decks' ||
|
||||
warning.path === 'ankiConnect.nPlusOne.knownWord',
|
||||
),
|
||||
);
|
||||
});
|
||||
|
||||
test('supports legacy ankiConnect.behavior N+1 settings as fallback', () => {
|
||||
@@ -1576,9 +1693,9 @@ test('supports legacy ankiConnect.behavior N+1 settings as fallback', () => {
|
||||
const config = service.getConfig();
|
||||
const warnings = service.getWarnings();
|
||||
|
||||
assert.equal(config.ankiConnect.nPlusOne.highlightEnabled, true);
|
||||
assert.equal(config.ankiConnect.nPlusOne.refreshMinutes, 90);
|
||||
assert.equal(config.ankiConnect.nPlusOne.matchMode, 'surface');
|
||||
assert.equal(config.ankiConnect.knownWords.highlightEnabled, true);
|
||||
assert.equal(config.ankiConnect.knownWords.refreshMinutes, 90);
|
||||
assert.equal(config.ankiConnect.knownWords.matchMode, 'surface');
|
||||
assert.ok(
|
||||
warnings.some(
|
||||
(warning) =>
|
||||
@@ -1799,14 +1916,14 @@ test('ignores deprecated isLapis sentence-card field overrides', () => {
|
||||
);
|
||||
});
|
||||
|
||||
test('accepts valid ankiConnect n+1 deck list', () => {
|
||||
test('accepts valid ankiConnect knownWords deck object', () => {
|
||||
const dir = makeTempDir();
|
||||
fs.writeFileSync(
|
||||
path.join(dir, 'config.jsonc'),
|
||||
`{
|
||||
"ankiConnect": {
|
||||
"nPlusOne": {
|
||||
"decks": ["Deck One", "Deck Two"]
|
||||
"knownWords": {
|
||||
"decks": { "Deck One": ["Word", "Reading"], "Deck Two": ["Expression"] }
|
||||
}
|
||||
}
|
||||
}`,
|
||||
@@ -1816,7 +1933,10 @@ test('accepts valid ankiConnect n+1 deck list', () => {
|
||||
const service = new ConfigService(dir);
|
||||
const config = service.getConfig();
|
||||
|
||||
assert.deepEqual(config.ankiConnect.nPlusOne.decks, ['Deck One', 'Deck Two']);
|
||||
assert.deepEqual(config.ankiConnect.knownWords.decks, {
|
||||
'Deck One': ['Word', 'Reading'],
|
||||
'Deck Two': ['Expression'],
|
||||
});
|
||||
});
|
||||
|
||||
test('accepts valid ankiConnect tags list', () => {
|
||||
@@ -1857,13 +1977,13 @@ test('falls back to default when ankiConnect tags list is invalid', () => {
|
||||
assert.ok(warnings.some((warning) => warning.path === 'ankiConnect.tags'));
|
||||
});
|
||||
|
||||
test('falls back to default when ankiConnect n+1 deck list is invalid', () => {
|
||||
test('falls back to default when ankiConnect knownWords deck list is invalid', () => {
|
||||
const dir = makeTempDir();
|
||||
fs.writeFileSync(
|
||||
path.join(dir, 'config.jsonc'),
|
||||
`{
|
||||
"ankiConnect": {
|
||||
"nPlusOne": {
|
||||
"knownWords": {
|
||||
"decks": "not-an-array"
|
||||
}
|
||||
}
|
||||
@@ -1875,8 +1995,8 @@ test('falls back to default when ankiConnect n+1 deck list is invalid', () => {
|
||||
const config = service.getConfig();
|
||||
const warnings = service.getWarnings();
|
||||
|
||||
assert.deepEqual(config.ankiConnect.nPlusOne.decks, []);
|
||||
assert.ok(warnings.some((warning) => warning.path === 'ankiConnect.nPlusOne.decks'));
|
||||
assert.deepEqual(config.ankiConnect.knownWords.decks, {});
|
||||
assert.ok(warnings.some((warning) => warning.path === 'ankiConnect.knownWords.decks'));
|
||||
});
|
||||
|
||||
test('template generator includes known keys', () => {
|
||||
@@ -1891,9 +2011,10 @@ test('template generator includes known keys', () => {
|
||||
assert.match(output, /"youtubeSubgen":/);
|
||||
assert.match(output, /"characterDictionary":\s*\{/);
|
||||
assert.match(output, /"preserveLineBreaks": false/);
|
||||
assert.match(output, /"knownWords"\s*:\s*\{/);
|
||||
assert.match(output, /"color": "#a6da95"/);
|
||||
assert.match(output, /"nPlusOne"\s*:\s*\{/);
|
||||
assert.match(output, /"nPlusOne": "#c6a0f6"/);
|
||||
assert.match(output, /"knownWord": "#a6da95"/);
|
||||
assert.match(output, /"minSentenceWords": 3/);
|
||||
assert.match(output, /auto-generated from src\/config\/definitions.ts/);
|
||||
assert.match(
|
||||
|
||||
@@ -2,10 +2,12 @@ import { RawConfig, ResolvedConfig } from '../types';
|
||||
import { CORE_DEFAULT_CONFIG } from './definitions/defaults-core';
|
||||
import { IMMERSION_DEFAULT_CONFIG } from './definitions/defaults-immersion';
|
||||
import { INTEGRATIONS_DEFAULT_CONFIG } from './definitions/defaults-integrations';
|
||||
import { STATS_DEFAULT_CONFIG } from './definitions/defaults-stats';
|
||||
import { SUBTITLE_DEFAULT_CONFIG } from './definitions/defaults-subtitle';
|
||||
import { buildCoreConfigOptionRegistry } from './definitions/options-core';
|
||||
import { buildImmersionConfigOptionRegistry } from './definitions/options-immersion';
|
||||
import { buildIntegrationConfigOptionRegistry } from './definitions/options-integrations';
|
||||
import { buildStatsConfigOptionRegistry } from './definitions/options-stats';
|
||||
import { buildSubtitleConfigOptionRegistry } from './definitions/options-subtitle';
|
||||
import { buildRuntimeOptionRegistry } from './definitions/runtime-options';
|
||||
import { CONFIG_TEMPLATE_SECTIONS } from './definitions/template-sections';
|
||||
@@ -36,6 +38,7 @@ const { ankiConnect, jimaku, anilist, yomitan, jellyfin, discordPresence, ai, yo
|
||||
INTEGRATIONS_DEFAULT_CONFIG;
|
||||
const { subtitleStyle } = SUBTITLE_DEFAULT_CONFIG;
|
||||
const { immersionTracking } = IMMERSION_DEFAULT_CONFIG;
|
||||
const { stats } = STATS_DEFAULT_CONFIG;
|
||||
|
||||
export const DEFAULT_CONFIG: ResolvedConfig = {
|
||||
subtitlePosition,
|
||||
@@ -60,6 +63,7 @@ export const DEFAULT_CONFIG: ResolvedConfig = {
|
||||
ai,
|
||||
youtubeSubgen,
|
||||
immersionTracking,
|
||||
stats,
|
||||
};
|
||||
|
||||
export const DEFAULT_ANKI_CONNECT_CONFIG = DEFAULT_CONFIG.ankiConnect;
|
||||
@@ -71,6 +75,7 @@ export const CONFIG_OPTION_REGISTRY = [
|
||||
...buildSubtitleConfigOptionRegistry(DEFAULT_CONFIG),
|
||||
...buildIntegrationConfigOptionRegistry(DEFAULT_CONFIG, RUNTIME_OPTION_REGISTRY),
|
||||
...buildImmersionConfigOptionRegistry(DEFAULT_CONFIG),
|
||||
...buildStatsConfigOptionRegistry(DEFAULT_CONFIG),
|
||||
];
|
||||
|
||||
export { CONFIG_TEMPLATE_SECTIONS };
|
||||
|
||||
@@ -9,12 +9,20 @@ export const IMMERSION_DEFAULT_CONFIG: Pick<ResolvedConfig, 'immersionTracking'>
|
||||
queueCap: 1000,
|
||||
payloadCapBytes: 256,
|
||||
maintenanceIntervalMs: 24 * 60 * 60 * 1000,
|
||||
retentionMode: 'preset',
|
||||
retentionPreset: 'balanced',
|
||||
retention: {
|
||||
eventsDays: 7,
|
||||
telemetryDays: 30,
|
||||
dailyRollupsDays: 365,
|
||||
monthlyRollupsDays: 5 * 365,
|
||||
vacuumIntervalDays: 7,
|
||||
eventsDays: 0,
|
||||
telemetryDays: 0,
|
||||
sessionsDays: 0,
|
||||
dailyRollupsDays: 0,
|
||||
monthlyRollupsDays: 0,
|
||||
vacuumIntervalDays: 0,
|
||||
},
|
||||
lifetimeSummaries: {
|
||||
global: true,
|
||||
anime: true,
|
||||
media: true,
|
||||
},
|
||||
},
|
||||
};
|
||||
|
||||
@@ -23,6 +23,7 @@ export const INTEGRATIONS_DEFAULT_CONFIG: Pick<
|
||||
},
|
||||
tags: ['SubMiner'],
|
||||
fields: {
|
||||
word: 'Expression',
|
||||
audio: 'ExpressionAudio',
|
||||
image: 'Picture',
|
||||
sentence: 'Sentence',
|
||||
@@ -46,10 +47,19 @@ export const INTEGRATIONS_DEFAULT_CONFIG: Pick<
|
||||
animatedMaxWidth: 640,
|
||||
animatedMaxHeight: undefined,
|
||||
animatedCrf: 35,
|
||||
syncAnimatedImageToWordAudio: true,
|
||||
audioPadding: 0.5,
|
||||
fallbackDuration: 3.0,
|
||||
maxMediaDuration: 30,
|
||||
},
|
||||
knownWords: {
|
||||
highlightEnabled: false,
|
||||
refreshMinutes: 1440,
|
||||
addMinedWordsImmediately: true,
|
||||
matchMode: 'headword',
|
||||
decks: {},
|
||||
color: '#a6da95',
|
||||
},
|
||||
behavior: {
|
||||
overwriteAudio: true,
|
||||
overwriteImage: true,
|
||||
@@ -59,13 +69,8 @@ export const INTEGRATIONS_DEFAULT_CONFIG: Pick<
|
||||
autoUpdateNewCards: true,
|
||||
},
|
||||
nPlusOne: {
|
||||
highlightEnabled: false,
|
||||
refreshMinutes: 1440,
|
||||
matchMode: 'headword',
|
||||
decks: [],
|
||||
minSentenceWords: 3,
|
||||
nPlusOne: '#c6a0f6',
|
||||
knownWord: '#a6da95',
|
||||
},
|
||||
metadata: {
|
||||
pattern: '[SubMiner] %f (%t)',
|
||||
|
||||
11
src/config/definitions/defaults-stats.ts
Normal file
11
src/config/definitions/defaults-stats.ts
Normal file
@@ -0,0 +1,11 @@
|
||||
import { ResolvedConfig } from '../../types.js';
|
||||
|
||||
export const STATS_DEFAULT_CONFIG: Pick<ResolvedConfig, 'stats'> = {
|
||||
stats: {
|
||||
toggleKey: 'Backquote',
|
||||
markWatchedKey: 'KeyW',
|
||||
serverPort: 6969,
|
||||
autoStartServer: true,
|
||||
autoOpenBrowser: true,
|
||||
},
|
||||
};
|
||||
@@ -48,35 +48,73 @@ export function buildImmersionConfigOptionRegistry(
|
||||
defaultValue: defaultConfig.immersionTracking.maintenanceIntervalMs,
|
||||
description: 'Maintenance cadence (prune + rollup + vacuum checks).',
|
||||
},
|
||||
{
|
||||
path: 'immersionTracking.retentionMode',
|
||||
kind: 'string',
|
||||
defaultValue: defaultConfig.immersionTracking.retentionMode,
|
||||
description: 'Retention mode (`preset` uses preset values, `advanced` uses explicit values).',
|
||||
enumValues: ['preset', 'advanced'],
|
||||
},
|
||||
{
|
||||
path: 'immersionTracking.retentionPreset',
|
||||
kind: 'string',
|
||||
defaultValue: defaultConfig.immersionTracking.retentionPreset,
|
||||
description: 'Retention preset when `retentionMode` is `preset`.',
|
||||
enumValues: ['minimal', 'balanced', 'deep-history'],
|
||||
},
|
||||
{
|
||||
path: 'immersionTracking.retention.eventsDays',
|
||||
kind: 'number',
|
||||
defaultValue: defaultConfig.immersionTracking.retention.eventsDays,
|
||||
description: 'Raw event retention window in days.',
|
||||
description: 'Raw event retention window in days. Use 0 to keep all.',
|
||||
},
|
||||
{
|
||||
path: 'immersionTracking.retention.telemetryDays',
|
||||
kind: 'number',
|
||||
defaultValue: defaultConfig.immersionTracking.retention.telemetryDays,
|
||||
description: 'Telemetry retention window in days.',
|
||||
description: 'Telemetry retention window in days. Use 0 to keep all.',
|
||||
},
|
||||
{
|
||||
path: 'immersionTracking.retention.sessionsDays',
|
||||
kind: 'number',
|
||||
defaultValue: defaultConfig.immersionTracking.retention.sessionsDays,
|
||||
description: 'Session retention window in days. Use 0 to keep all.',
|
||||
},
|
||||
{
|
||||
path: 'immersionTracking.retention.dailyRollupsDays',
|
||||
kind: 'number',
|
||||
defaultValue: defaultConfig.immersionTracking.retention.dailyRollupsDays,
|
||||
description: 'Daily rollup retention window in days.',
|
||||
description: 'Daily rollup retention window in days. Use 0 to keep all.',
|
||||
},
|
||||
{
|
||||
path: 'immersionTracking.retention.monthlyRollupsDays',
|
||||
kind: 'number',
|
||||
defaultValue: defaultConfig.immersionTracking.retention.monthlyRollupsDays,
|
||||
description: 'Monthly rollup retention window in days.',
|
||||
description: 'Monthly rollup retention window in days. Use 0 to keep all.',
|
||||
},
|
||||
{
|
||||
path: 'immersionTracking.retention.vacuumIntervalDays',
|
||||
kind: 'number',
|
||||
defaultValue: defaultConfig.immersionTracking.retention.vacuumIntervalDays,
|
||||
description: 'Minimum days between VACUUM runs.',
|
||||
description: 'Minimum days between VACUUM runs. Use 0 to disable.',
|
||||
},
|
||||
{
|
||||
path: 'immersionTracking.lifetimeSummaries.global',
|
||||
kind: 'boolean',
|
||||
defaultValue: defaultConfig.immersionTracking.lifetimeSummaries?.global,
|
||||
description: 'Maintain global lifetime stats rows.',
|
||||
},
|
||||
{
|
||||
path: 'immersionTracking.lifetimeSummaries.anime',
|
||||
kind: 'boolean',
|
||||
defaultValue: defaultConfig.immersionTracking.lifetimeSummaries?.anime,
|
||||
description: 'Maintain per-anime lifetime stats rows.',
|
||||
},
|
||||
{
|
||||
path: 'immersionTracking.lifetimeSummaries.media',
|
||||
kind: 'boolean',
|
||||
defaultValue: defaultConfig.immersionTracking.lifetimeSummaries?.media,
|
||||
description: 'Maintain per-media lifetime stats rows.',
|
||||
},
|
||||
];
|
||||
}
|
||||
|
||||
@@ -51,6 +51,12 @@ export function buildIntegrationConfigOptionRegistry(
|
||||
description:
|
||||
'Tags to add to cards mined or updated by SubMiner. Provide an empty array to disable automatic tagging.',
|
||||
},
|
||||
{
|
||||
path: 'ankiConnect.fields.word',
|
||||
kind: 'string',
|
||||
defaultValue: defaultConfig.ankiConnect.fields.word,
|
||||
description: 'Card field for the mined word or expression text.',
|
||||
},
|
||||
{
|
||||
path: 'ankiConnect.ai.enabled',
|
||||
kind: 'boolean',
|
||||
@@ -77,24 +83,37 @@ export function buildIntegrationConfigOptionRegistry(
|
||||
runtime: runtimeOptionById.get('anki.autoUpdateNewCards'),
|
||||
},
|
||||
{
|
||||
path: 'ankiConnect.nPlusOne.matchMode',
|
||||
kind: 'enum',
|
||||
enumValues: ['headword', 'surface'],
|
||||
defaultValue: defaultConfig.ankiConnect.nPlusOne.matchMode,
|
||||
description: 'Known-word matching strategy for N+1 highlighting.',
|
||||
path: 'ankiConnect.media.syncAnimatedImageToWordAudio',
|
||||
kind: 'boolean',
|
||||
defaultValue: defaultConfig.ankiConnect.media.syncAnimatedImageToWordAudio,
|
||||
description:
|
||||
'For animated AVIF images, prepend a frozen first frame matching the existing word-audio duration so motion starts with sentence audio.',
|
||||
},
|
||||
{
|
||||
path: 'ankiConnect.nPlusOne.highlightEnabled',
|
||||
path: 'ankiConnect.knownWords.matchMode',
|
||||
kind: 'enum',
|
||||
enumValues: ['headword', 'surface'],
|
||||
defaultValue: defaultConfig.ankiConnect.knownWords.matchMode,
|
||||
description: 'Known-word matching strategy for subtitle annotations.',
|
||||
},
|
||||
{
|
||||
path: 'ankiConnect.knownWords.highlightEnabled',
|
||||
kind: 'boolean',
|
||||
defaultValue: defaultConfig.ankiConnect.nPlusOne.highlightEnabled,
|
||||
defaultValue: defaultConfig.ankiConnect.knownWords.highlightEnabled,
|
||||
description: 'Enable fast local highlighting for words already known in Anki.',
|
||||
},
|
||||
{
|
||||
path: 'ankiConnect.nPlusOne.refreshMinutes',
|
||||
path: 'ankiConnect.knownWords.refreshMinutes',
|
||||
kind: 'number',
|
||||
defaultValue: defaultConfig.ankiConnect.nPlusOne.refreshMinutes,
|
||||
defaultValue: defaultConfig.ankiConnect.knownWords.refreshMinutes,
|
||||
description: 'Minutes between known-word cache refreshes.',
|
||||
},
|
||||
{
|
||||
path: 'ankiConnect.knownWords.addMinedWordsImmediately',
|
||||
kind: 'boolean',
|
||||
defaultValue: defaultConfig.ankiConnect.knownWords.addMinedWordsImmediately,
|
||||
description: 'Immediately append newly mined card words into the known-word cache.',
|
||||
},
|
||||
{
|
||||
path: 'ankiConnect.nPlusOne.minSentenceWords',
|
||||
kind: 'number',
|
||||
@@ -102,10 +121,11 @@ export function buildIntegrationConfigOptionRegistry(
|
||||
description: 'Minimum sentence word count required for N+1 targeting (default: 3).',
|
||||
},
|
||||
{
|
||||
path: 'ankiConnect.nPlusOne.decks',
|
||||
kind: 'array',
|
||||
defaultValue: defaultConfig.ankiConnect.nPlusOne.decks,
|
||||
description: 'Decks used for N+1 known-word cache scope. Supports one or more deck names.',
|
||||
path: 'ankiConnect.knownWords.decks',
|
||||
kind: 'object',
|
||||
defaultValue: defaultConfig.ankiConnect.knownWords.decks,
|
||||
description:
|
||||
'Decks and fields for known-word cache. Object mapping deck names to arrays of field names to extract, e.g. { "Kaishi 1.5k": ["Word", "Word Reading"] }.',
|
||||
},
|
||||
{
|
||||
path: 'ankiConnect.nPlusOne.nPlusOne',
|
||||
@@ -114,10 +134,10 @@ export function buildIntegrationConfigOptionRegistry(
|
||||
description: 'Color used for the single N+1 target token highlight.',
|
||||
},
|
||||
{
|
||||
path: 'ankiConnect.nPlusOne.knownWord',
|
||||
path: 'ankiConnect.knownWords.color',
|
||||
kind: 'string',
|
||||
defaultValue: defaultConfig.ankiConnect.nPlusOne.knownWord,
|
||||
description: 'Color used for legacy known-word highlights.',
|
||||
defaultValue: defaultConfig.ankiConnect.knownWords.color,
|
||||
description: 'Color used for known-word highlights.',
|
||||
},
|
||||
{
|
||||
path: 'ankiConnect.isKiku.fieldGrouping',
|
||||
|
||||
39
src/config/definitions/options-stats.ts
Normal file
39
src/config/definitions/options-stats.ts
Normal file
@@ -0,0 +1,39 @@
|
||||
import { ResolvedConfig } from '../../types.js';
|
||||
import { ConfigOptionRegistryEntry } from './shared.js';
|
||||
|
||||
export function buildStatsConfigOptionRegistry(
|
||||
defaultConfig: ResolvedConfig,
|
||||
): ConfigOptionRegistryEntry[] {
|
||||
return [
|
||||
{
|
||||
path: 'stats.toggleKey',
|
||||
kind: 'string',
|
||||
defaultValue: defaultConfig.stats.toggleKey,
|
||||
description: 'Key code to toggle the stats overlay.',
|
||||
},
|
||||
{
|
||||
path: 'stats.markWatchedKey',
|
||||
kind: 'string',
|
||||
defaultValue: defaultConfig.stats.markWatchedKey,
|
||||
description: 'Key code to mark the current video as watched and advance to the next playlist entry.',
|
||||
},
|
||||
{
|
||||
path: 'stats.serverPort',
|
||||
kind: 'number',
|
||||
defaultValue: defaultConfig.stats.serverPort,
|
||||
description: 'Port for the stats HTTP server.',
|
||||
},
|
||||
{
|
||||
path: 'stats.autoStartServer',
|
||||
kind: 'boolean',
|
||||
defaultValue: defaultConfig.stats.autoStartServer,
|
||||
description: 'Automatically start the stats server on launch.',
|
||||
},
|
||||
{
|
||||
path: 'stats.autoOpenBrowser',
|
||||
kind: 'boolean',
|
||||
defaultValue: defaultConfig.stats.autoOpenBrowser,
|
||||
description: 'Automatically open the stats dashboard in a browser when the server starts.',
|
||||
},
|
||||
];
|
||||
}
|
||||
@@ -21,15 +21,19 @@ export function buildRuntimeOptionRegistry(
|
||||
},
|
||||
{
|
||||
id: 'subtitle.annotation.nPlusOne',
|
||||
path: 'ankiConnect.nPlusOne.highlightEnabled',
|
||||
path: 'ankiConnect.knownWords.highlightEnabled',
|
||||
label: 'N+1 Annotation',
|
||||
scope: 'subtitle',
|
||||
valueType: 'boolean',
|
||||
allowedValues: [true, false],
|
||||
defaultValue: defaultConfig.ankiConnect.nPlusOne.highlightEnabled,
|
||||
defaultValue: defaultConfig.ankiConnect.knownWords.highlightEnabled,
|
||||
requiresRestart: false,
|
||||
formatValueForOsd: (value) => (value === true ? 'On' : 'Off'),
|
||||
toAnkiPatch: () => ({}),
|
||||
toAnkiPatch: (value) => ({
|
||||
knownWords: {
|
||||
highlightEnabled: value === true,
|
||||
},
|
||||
}),
|
||||
},
|
||||
{
|
||||
id: 'subtitle.annotation.jlpt',
|
||||
@@ -57,16 +61,16 @@ export function buildRuntimeOptionRegistry(
|
||||
},
|
||||
{
|
||||
id: 'anki.nPlusOneMatchMode',
|
||||
path: 'ankiConnect.nPlusOne.matchMode',
|
||||
label: 'N+1 Match Mode',
|
||||
path: 'ankiConnect.knownWords.matchMode',
|
||||
label: 'Known Word Match Mode',
|
||||
scope: 'ankiConnect',
|
||||
valueType: 'enum',
|
||||
allowedValues: ['headword', 'surface'],
|
||||
defaultValue: defaultConfig.ankiConnect.nPlusOne.matchMode,
|
||||
defaultValue: defaultConfig.ankiConnect.knownWords.matchMode,
|
||||
requiresRestart: false,
|
||||
formatValueForOsd: (value) => String(value),
|
||||
toAnkiPatch: (value) => ({
|
||||
nPlusOne: {
|
||||
knownWords: {
|
||||
matchMode: value === 'headword' || value === 'surface' ? value : 'headword',
|
||||
},
|
||||
}),
|
||||
|
||||
@@ -176,6 +176,14 @@ const IMMERSION_TEMPLATE_SECTIONS: ConfigTemplateSection[] = [
|
||||
],
|
||||
key: 'immersionTracking',
|
||||
},
|
||||
{
|
||||
title: 'Stats Dashboard',
|
||||
description: [
|
||||
'Local immersion stats dashboard served on localhost and available as an in-app overlay.',
|
||||
'Uses the immersion tracking database for overview, trends, sessions, and vocabulary views.',
|
||||
],
|
||||
key: 'stats',
|
||||
},
|
||||
];
|
||||
|
||||
export const CONFIG_TEMPLATE_SECTIONS: ConfigTemplateSection[] = [
|
||||
|
||||
@@ -4,6 +4,7 @@ import { createResolveContext } from './resolve/context';
|
||||
import { applyCoreDomainConfig } from './resolve/core-domains';
|
||||
import { applyImmersionTrackingConfig } from './resolve/immersion-tracking';
|
||||
import { applyIntegrationConfig } from './resolve/integrations';
|
||||
import { applyStatsConfig } from './resolve/stats';
|
||||
import { applySubtitleDomainConfig } from './resolve/subtitle-domains';
|
||||
import { applyTopLevelConfig } from './resolve/top-level';
|
||||
|
||||
@@ -13,6 +14,7 @@ const APPLY_RESOLVE_STEPS = [
|
||||
applySubtitleDomainConfig,
|
||||
applyIntegrationConfig,
|
||||
applyImmersionTrackingConfig,
|
||||
applyStatsConfig,
|
||||
applyAnkiConnectResolution,
|
||||
] as const;
|
||||
|
||||
|
||||
@@ -20,21 +20,21 @@ function makeContext(ankiConnect: unknown): {
|
||||
return { context, warnings };
|
||||
}
|
||||
|
||||
test('modern invalid nPlusOne.highlightEnabled warns modern key and does not fallback to legacy', () => {
|
||||
test('modern invalid knownWords.highlightEnabled warns modern key and does not fallback to legacy', () => {
|
||||
const { context, warnings } = makeContext({
|
||||
behavior: { nPlusOneHighlightEnabled: true },
|
||||
nPlusOne: { highlightEnabled: 'yes' },
|
||||
nPlusOne: { highlightEnabled: true },
|
||||
knownWords: { highlightEnabled: 'yes' },
|
||||
});
|
||||
|
||||
applyAnkiConnectResolution(context);
|
||||
|
||||
assert.equal(
|
||||
context.resolved.ankiConnect.nPlusOne.highlightEnabled,
|
||||
DEFAULT_CONFIG.ankiConnect.nPlusOne.highlightEnabled,
|
||||
context.resolved.ankiConnect.knownWords.highlightEnabled,
|
||||
DEFAULT_CONFIG.ankiConnect.knownWords.highlightEnabled,
|
||||
);
|
||||
assert.ok(warnings.some((warning) => warning.path === 'ankiConnect.nPlusOne.highlightEnabled'));
|
||||
assert.ok(warnings.some((warning) => warning.path === 'ankiConnect.knownWords.highlightEnabled'));
|
||||
assert.equal(
|
||||
warnings.some((warning) => warning.path === 'ankiConnect.behavior.nPlusOneHighlightEnabled'),
|
||||
warnings.some((warning) => warning.path === 'ankiConnect.nPlusOne.highlightEnabled'),
|
||||
false,
|
||||
);
|
||||
});
|
||||
@@ -53,18 +53,48 @@ test('normalizes ankiConnect tags by trimming and deduping', () => {
|
||||
);
|
||||
});
|
||||
|
||||
test('warns and falls back for invalid nPlusOne.decks entries', () => {
|
||||
test('accepts knownWords.decks object format with field arrays', () => {
|
||||
const { context, warnings } = makeContext({
|
||||
nPlusOne: { decks: ['Core Deck', 123] },
|
||||
knownWords: { decks: { 'Core Deck': ['Word', 'Reading'], Mining: ['Expression'] } },
|
||||
});
|
||||
|
||||
applyAnkiConnectResolution(context);
|
||||
|
||||
assert.deepEqual(
|
||||
context.resolved.ankiConnect.nPlusOne.decks,
|
||||
DEFAULT_CONFIG.ankiConnect.nPlusOne.decks,
|
||||
assert.deepEqual(context.resolved.ankiConnect.knownWords.decks, {
|
||||
'Core Deck': ['Word', 'Reading'],
|
||||
Mining: ['Expression'],
|
||||
});
|
||||
assert.equal(
|
||||
warnings.some((warning) => warning.path === 'ankiConnect.knownWords.decks'),
|
||||
false,
|
||||
);
|
||||
assert.ok(warnings.some((warning) => warning.path === 'ankiConnect.nPlusOne.decks'));
|
||||
});
|
||||
|
||||
test('accepts knownWords.addMinedWordsImmediately boolean override', () => {
|
||||
const { context, warnings } = makeContext({
|
||||
knownWords: { addMinedWordsImmediately: false },
|
||||
});
|
||||
|
||||
applyAnkiConnectResolution(context);
|
||||
|
||||
assert.equal(context.resolved.ankiConnect.knownWords.addMinedWordsImmediately, false);
|
||||
assert.equal(
|
||||
warnings.some((warning) => warning.path === 'ankiConnect.knownWords.addMinedWordsImmediately'),
|
||||
false,
|
||||
);
|
||||
});
|
||||
|
||||
test('converts legacy knownWords.decks array to object with default fields', () => {
|
||||
const { context, warnings } = makeContext({
|
||||
knownWords: { decks: ['Core Deck'] },
|
||||
});
|
||||
|
||||
applyAnkiConnectResolution(context);
|
||||
|
||||
assert.deepEqual(context.resolved.ankiConnect.knownWords.decks, {
|
||||
'Core Deck': ['Expression', 'Word', 'Reading', 'Word Reading'],
|
||||
});
|
||||
assert.ok(warnings.some((warning) => warning.path === 'ankiConnect.knownWords.decks'));
|
||||
});
|
||||
|
||||
test('accepts valid proxy settings', () => {
|
||||
@@ -89,6 +119,52 @@ test('accepts valid proxy settings', () => {
|
||||
);
|
||||
});
|
||||
|
||||
test('accepts configured ankiConnect.fields.word override', () => {
|
||||
const { context, warnings } = makeContext({
|
||||
fields: {
|
||||
word: 'TargetWord',
|
||||
},
|
||||
});
|
||||
|
||||
applyAnkiConnectResolution(context);
|
||||
|
||||
assert.equal(context.resolved.ankiConnect.fields.word, 'TargetWord');
|
||||
assert.equal(
|
||||
warnings.some((warning) => warning.path === 'ankiConnect.fields.word'),
|
||||
false,
|
||||
);
|
||||
});
|
||||
|
||||
test('accepts ankiConnect.media.syncAnimatedImageToWordAudio override', () => {
|
||||
const { context, warnings } = makeContext({
|
||||
media: {
|
||||
syncAnimatedImageToWordAudio: false,
|
||||
},
|
||||
});
|
||||
|
||||
applyAnkiConnectResolution(context);
|
||||
|
||||
assert.equal(context.resolved.ankiConnect.media.syncAnimatedImageToWordAudio, false);
|
||||
assert.equal(
|
||||
warnings.some((warning) => warning.path === 'ankiConnect.media.syncAnimatedImageToWordAudio'),
|
||||
false,
|
||||
);
|
||||
});
|
||||
|
||||
test('maps legacy ankiConnect.wordField to modern ankiConnect.fields.word', () => {
|
||||
const { context, warnings } = makeContext({
|
||||
wordField: 'TargetWordLegacy',
|
||||
});
|
||||
|
||||
applyAnkiConnectResolution(context);
|
||||
|
||||
assert.equal(context.resolved.ankiConnect.fields.word, 'TargetWordLegacy');
|
||||
assert.equal(
|
||||
warnings.some((warning) => warning.path === 'ankiConnect.wordField'),
|
||||
false,
|
||||
);
|
||||
});
|
||||
|
||||
test('warns and falls back for invalid proxy settings', () => {
|
||||
const { context, warnings } = makeContext({
|
||||
proxy: {
|
||||
|
||||
@@ -14,6 +14,7 @@ export function applyAnkiConnectResolution(context: ResolveContext): void {
|
||||
const metadata = isObject(ac.metadata) ? (ac.metadata as Record<string, unknown>) : {};
|
||||
const proxy = isObject(ac.proxy) ? (ac.proxy as Record<string, unknown>) : {};
|
||||
const legacyKeys = new Set([
|
||||
'wordField',
|
||||
'audioField',
|
||||
'imageField',
|
||||
'sentenceField',
|
||||
@@ -30,6 +31,7 @@ export function applyAnkiConnectResolution(context: ResolveContext): void {
|
||||
'animatedMaxWidth',
|
||||
'animatedMaxHeight',
|
||||
'animatedCrf',
|
||||
'syncAnimatedImageToWordAudio',
|
||||
'audioPadding',
|
||||
'fallbackDuration',
|
||||
'maxMediaDuration',
|
||||
@@ -42,12 +44,13 @@ export function applyAnkiConnectResolution(context: ResolveContext): void {
|
||||
]);
|
||||
|
||||
const {
|
||||
knownWords: _knownWordsConfigFromAnkiConnect,
|
||||
nPlusOne: _nPlusOneConfigFromAnkiConnect,
|
||||
ai: _ankiAiConfig,
|
||||
...ankiConnectWithoutNPlusOne
|
||||
...ankiConnectWithoutKnownWordsOrNPlusOne
|
||||
} = ac as Record<string, unknown>;
|
||||
const ankiConnectWithoutLegacy = Object.fromEntries(
|
||||
Object.entries(ankiConnectWithoutNPlusOne).filter(([key]) => !legacyKeys.has(key)),
|
||||
Object.entries(ankiConnectWithoutKnownWordsOrNPlusOne).filter(([key]) => !legacyKeys.has(key)),
|
||||
);
|
||||
|
||||
context.resolved.ankiConnect = {
|
||||
@@ -67,6 +70,9 @@ export function applyAnkiConnectResolution(context: ResolveContext): void {
|
||||
? (ac.media as (typeof context.resolved)['ankiConnect']['media'])
|
||||
: {}),
|
||||
},
|
||||
knownWords: {
|
||||
...context.resolved.ankiConnect.knownWords,
|
||||
},
|
||||
behavior: {
|
||||
...context.resolved.ankiConnect.behavior,
|
||||
...(isObject(ac.behavior)
|
||||
@@ -355,6 +361,17 @@ export function applyAnkiConnectResolution(context: ResolveContext): void {
|
||||
'Expected string.',
|
||||
);
|
||||
}
|
||||
if (!hasOwn(fields, 'word')) {
|
||||
mapLegacy(
|
||||
'wordField',
|
||||
asString,
|
||||
(value) => {
|
||||
context.resolved.ankiConnect.fields.word = value;
|
||||
},
|
||||
context.resolved.ankiConnect.fields.word,
|
||||
'Expected string.',
|
||||
);
|
||||
}
|
||||
if (!hasOwn(fields, 'image')) {
|
||||
mapLegacy(
|
||||
'imageField',
|
||||
@@ -520,6 +537,17 @@ export function applyAnkiConnectResolution(context: ResolveContext): void {
|
||||
'Expected integer between 0 and 63.',
|
||||
);
|
||||
}
|
||||
if (!hasOwn(media, 'syncAnimatedImageToWordAudio')) {
|
||||
mapLegacy(
|
||||
'syncAnimatedImageToWordAudio',
|
||||
asBoolean,
|
||||
(value) => {
|
||||
context.resolved.ankiConnect.media.syncAnimatedImageToWordAudio = value;
|
||||
},
|
||||
context.resolved.ankiConnect.media.syncAnimatedImageToWordAudio,
|
||||
'Expected boolean.',
|
||||
);
|
||||
}
|
||||
if (!hasOwn(media, 'audioPadding')) {
|
||||
mapLegacy(
|
||||
'audioPadding',
|
||||
@@ -620,81 +648,145 @@ export function applyAnkiConnectResolution(context: ResolveContext): void {
|
||||
);
|
||||
}
|
||||
|
||||
const knownWordsConfig = isObject(ac.knownWords)
|
||||
? (ac.knownWords as Record<string, unknown>)
|
||||
: {};
|
||||
const nPlusOneConfig = isObject(ac.nPlusOne) ? (ac.nPlusOne as Record<string, unknown>) : {};
|
||||
|
||||
const nPlusOneHighlightEnabled = asBoolean(nPlusOneConfig.highlightEnabled);
|
||||
if (nPlusOneHighlightEnabled !== undefined) {
|
||||
context.resolved.ankiConnect.nPlusOne.highlightEnabled = nPlusOneHighlightEnabled;
|
||||
const knownWordsHighlightEnabled = asBoolean(knownWordsConfig.highlightEnabled);
|
||||
const legacyNPlusOneHighlightEnabled = asBoolean(nPlusOneConfig.highlightEnabled);
|
||||
if (knownWordsHighlightEnabled !== undefined) {
|
||||
context.resolved.ankiConnect.knownWords.highlightEnabled = knownWordsHighlightEnabled;
|
||||
} else if (knownWordsConfig.highlightEnabled !== undefined) {
|
||||
context.warn(
|
||||
'ankiConnect.knownWords.highlightEnabled',
|
||||
knownWordsConfig.highlightEnabled,
|
||||
context.resolved.ankiConnect.knownWords.highlightEnabled,
|
||||
'Expected boolean.',
|
||||
);
|
||||
context.resolved.ankiConnect.knownWords.highlightEnabled =
|
||||
DEFAULT_CONFIG.ankiConnect.knownWords.highlightEnabled;
|
||||
} else if (legacyNPlusOneHighlightEnabled !== undefined) {
|
||||
context.resolved.ankiConnect.knownWords.highlightEnabled = legacyNPlusOneHighlightEnabled;
|
||||
context.warn(
|
||||
'ankiConnect.nPlusOne.highlightEnabled',
|
||||
nPlusOneConfig.highlightEnabled,
|
||||
DEFAULT_CONFIG.ankiConnect.knownWords.highlightEnabled,
|
||||
'Legacy key is deprecated; use ankiConnect.knownWords.highlightEnabled',
|
||||
);
|
||||
} else if (nPlusOneConfig.highlightEnabled !== undefined) {
|
||||
context.warn(
|
||||
'ankiConnect.nPlusOne.highlightEnabled',
|
||||
nPlusOneConfig.highlightEnabled,
|
||||
context.resolved.ankiConnect.nPlusOne.highlightEnabled,
|
||||
context.resolved.ankiConnect.knownWords.highlightEnabled,
|
||||
'Expected boolean.',
|
||||
);
|
||||
context.resolved.ankiConnect.nPlusOne.highlightEnabled =
|
||||
DEFAULT_CONFIG.ankiConnect.nPlusOne.highlightEnabled;
|
||||
context.resolved.ankiConnect.knownWords.highlightEnabled =
|
||||
DEFAULT_CONFIG.ankiConnect.knownWords.highlightEnabled;
|
||||
} else {
|
||||
const legacyNPlusOneHighlightEnabled = asBoolean(behavior.nPlusOneHighlightEnabled);
|
||||
if (legacyNPlusOneHighlightEnabled !== undefined) {
|
||||
context.resolved.ankiConnect.nPlusOne.highlightEnabled = legacyNPlusOneHighlightEnabled;
|
||||
const legacyBehaviorNPlusOneHighlightEnabled = asBoolean(behavior.nPlusOneHighlightEnabled);
|
||||
if (legacyBehaviorNPlusOneHighlightEnabled !== undefined) {
|
||||
context.resolved.ankiConnect.knownWords.highlightEnabled =
|
||||
legacyBehaviorNPlusOneHighlightEnabled;
|
||||
context.warn(
|
||||
'ankiConnect.behavior.nPlusOneHighlightEnabled',
|
||||
behavior.nPlusOneHighlightEnabled,
|
||||
DEFAULT_CONFIG.ankiConnect.nPlusOne.highlightEnabled,
|
||||
'Legacy key is deprecated; use ankiConnect.nPlusOne.highlightEnabled',
|
||||
DEFAULT_CONFIG.ankiConnect.knownWords.highlightEnabled,
|
||||
'Legacy key is deprecated; use ankiConnect.knownWords.highlightEnabled',
|
||||
);
|
||||
} else {
|
||||
context.resolved.ankiConnect.nPlusOne.highlightEnabled =
|
||||
DEFAULT_CONFIG.ankiConnect.nPlusOne.highlightEnabled;
|
||||
context.resolved.ankiConnect.knownWords.highlightEnabled =
|
||||
DEFAULT_CONFIG.ankiConnect.knownWords.highlightEnabled;
|
||||
}
|
||||
}
|
||||
|
||||
const nPlusOneRefreshMinutes = asNumber(nPlusOneConfig.refreshMinutes);
|
||||
const hasValidNPlusOneRefreshMinutes =
|
||||
nPlusOneRefreshMinutes !== undefined &&
|
||||
Number.isInteger(nPlusOneRefreshMinutes) &&
|
||||
nPlusOneRefreshMinutes > 0;
|
||||
if (nPlusOneRefreshMinutes !== undefined) {
|
||||
if (hasValidNPlusOneRefreshMinutes) {
|
||||
context.resolved.ankiConnect.nPlusOne.refreshMinutes = nPlusOneRefreshMinutes;
|
||||
const knownWordsRefreshMinutes = asNumber(knownWordsConfig.refreshMinutes);
|
||||
const legacyNPlusOneRefreshMinutes = asNumber(nPlusOneConfig.refreshMinutes);
|
||||
const hasValidKnownWordsRefreshMinutes =
|
||||
knownWordsRefreshMinutes !== undefined &&
|
||||
Number.isInteger(knownWordsRefreshMinutes) &&
|
||||
knownWordsRefreshMinutes > 0;
|
||||
const hasValidLegacyNPlusOneRefreshMinutes =
|
||||
legacyNPlusOneRefreshMinutes !== undefined &&
|
||||
Number.isInteger(legacyNPlusOneRefreshMinutes) &&
|
||||
legacyNPlusOneRefreshMinutes > 0;
|
||||
if (knownWordsRefreshMinutes !== undefined) {
|
||||
if (hasValidKnownWordsRefreshMinutes) {
|
||||
context.resolved.ankiConnect.knownWords.refreshMinutes = knownWordsRefreshMinutes;
|
||||
} else {
|
||||
context.warn(
|
||||
'ankiConnect.knownWords.refreshMinutes',
|
||||
knownWordsConfig.refreshMinutes,
|
||||
context.resolved.ankiConnect.knownWords.refreshMinutes,
|
||||
'Expected a positive integer.',
|
||||
);
|
||||
context.resolved.ankiConnect.knownWords.refreshMinutes =
|
||||
DEFAULT_CONFIG.ankiConnect.knownWords.refreshMinutes;
|
||||
}
|
||||
} else if (legacyNPlusOneRefreshMinutes !== undefined) {
|
||||
if (hasValidLegacyNPlusOneRefreshMinutes) {
|
||||
context.resolved.ankiConnect.knownWords.refreshMinutes = legacyNPlusOneRefreshMinutes;
|
||||
context.warn(
|
||||
'ankiConnect.nPlusOne.refreshMinutes',
|
||||
nPlusOneConfig.refreshMinutes,
|
||||
DEFAULT_CONFIG.ankiConnect.knownWords.refreshMinutes,
|
||||
'Legacy key is deprecated; use ankiConnect.knownWords.refreshMinutes',
|
||||
);
|
||||
} else {
|
||||
context.warn(
|
||||
'ankiConnect.nPlusOne.refreshMinutes',
|
||||
nPlusOneConfig.refreshMinutes,
|
||||
context.resolved.ankiConnect.nPlusOne.refreshMinutes,
|
||||
context.resolved.ankiConnect.knownWords.refreshMinutes,
|
||||
'Expected a positive integer.',
|
||||
);
|
||||
context.resolved.ankiConnect.nPlusOne.refreshMinutes =
|
||||
DEFAULT_CONFIG.ankiConnect.nPlusOne.refreshMinutes;
|
||||
context.resolved.ankiConnect.knownWords.refreshMinutes =
|
||||
DEFAULT_CONFIG.ankiConnect.knownWords.refreshMinutes;
|
||||
}
|
||||
} else if (asNumber(behavior.nPlusOneRefreshMinutes) !== undefined) {
|
||||
const legacyNPlusOneRefreshMinutes = asNumber(behavior.nPlusOneRefreshMinutes);
|
||||
const legacyBehaviorNPlusOneRefreshMinutes = asNumber(behavior.nPlusOneRefreshMinutes);
|
||||
const hasValidLegacyRefreshMinutes =
|
||||
legacyNPlusOneRefreshMinutes !== undefined &&
|
||||
Number.isInteger(legacyNPlusOneRefreshMinutes) &&
|
||||
legacyNPlusOneRefreshMinutes > 0;
|
||||
legacyBehaviorNPlusOneRefreshMinutes !== undefined &&
|
||||
Number.isInteger(legacyBehaviorNPlusOneRefreshMinutes) &&
|
||||
legacyBehaviorNPlusOneRefreshMinutes > 0;
|
||||
if (hasValidLegacyRefreshMinutes) {
|
||||
context.resolved.ankiConnect.nPlusOne.refreshMinutes = legacyNPlusOneRefreshMinutes;
|
||||
context.resolved.ankiConnect.knownWords.refreshMinutes = legacyBehaviorNPlusOneRefreshMinutes;
|
||||
context.warn(
|
||||
'ankiConnect.behavior.nPlusOneRefreshMinutes',
|
||||
behavior.nPlusOneRefreshMinutes,
|
||||
DEFAULT_CONFIG.ankiConnect.nPlusOne.refreshMinutes,
|
||||
'Legacy key is deprecated; use ankiConnect.nPlusOne.refreshMinutes',
|
||||
DEFAULT_CONFIG.ankiConnect.knownWords.refreshMinutes,
|
||||
'Legacy key is deprecated; use ankiConnect.knownWords.refreshMinutes',
|
||||
);
|
||||
} else {
|
||||
context.warn(
|
||||
'ankiConnect.behavior.nPlusOneRefreshMinutes',
|
||||
behavior.nPlusOneRefreshMinutes,
|
||||
context.resolved.ankiConnect.nPlusOne.refreshMinutes,
|
||||
context.resolved.ankiConnect.knownWords.refreshMinutes,
|
||||
'Expected a positive integer.',
|
||||
);
|
||||
context.resolved.ankiConnect.nPlusOne.refreshMinutes =
|
||||
DEFAULT_CONFIG.ankiConnect.nPlusOne.refreshMinutes;
|
||||
context.resolved.ankiConnect.knownWords.refreshMinutes =
|
||||
DEFAULT_CONFIG.ankiConnect.knownWords.refreshMinutes;
|
||||
}
|
||||
} else {
|
||||
context.resolved.ankiConnect.nPlusOne.refreshMinutes =
|
||||
DEFAULT_CONFIG.ankiConnect.nPlusOne.refreshMinutes;
|
||||
context.resolved.ankiConnect.knownWords.refreshMinutes =
|
||||
DEFAULT_CONFIG.ankiConnect.knownWords.refreshMinutes;
|
||||
}
|
||||
|
||||
const knownWordsAddMinedWordsImmediately = asBoolean(knownWordsConfig.addMinedWordsImmediately);
|
||||
if (knownWordsAddMinedWordsImmediately !== undefined) {
|
||||
context.resolved.ankiConnect.knownWords.addMinedWordsImmediately =
|
||||
knownWordsAddMinedWordsImmediately;
|
||||
} else if (knownWordsConfig.addMinedWordsImmediately !== undefined) {
|
||||
context.warn(
|
||||
'ankiConnect.knownWords.addMinedWordsImmediately',
|
||||
knownWordsConfig.addMinedWordsImmediately,
|
||||
context.resolved.ankiConnect.knownWords.addMinedWordsImmediately,
|
||||
'Expected boolean.',
|
||||
);
|
||||
context.resolved.ankiConnect.knownWords.addMinedWordsImmediately =
|
||||
DEFAULT_CONFIG.ankiConnect.knownWords.addMinedWordsImmediately;
|
||||
} else {
|
||||
context.resolved.ankiConnect.knownWords.addMinedWordsImmediately =
|
||||
DEFAULT_CONFIG.ankiConnect.knownWords.addMinedWordsImmediately;
|
||||
}
|
||||
|
||||
const nPlusOneMinSentenceWords = asNumber(nPlusOneConfig.minSentenceWords);
|
||||
@@ -720,72 +812,138 @@ export function applyAnkiConnectResolution(context: ResolveContext): void {
|
||||
DEFAULT_CONFIG.ankiConnect.nPlusOne.minSentenceWords;
|
||||
}
|
||||
|
||||
const nPlusOneMatchMode = asString(nPlusOneConfig.matchMode);
|
||||
const legacyNPlusOneMatchMode = asString(behavior.nPlusOneMatchMode);
|
||||
const hasValidNPlusOneMatchMode =
|
||||
nPlusOneMatchMode === 'headword' || nPlusOneMatchMode === 'surface';
|
||||
const hasValidLegacyMatchMode =
|
||||
const knownWordsMatchMode = asString(knownWordsConfig.matchMode);
|
||||
const legacyNPlusOneMatchMode = asString(nPlusOneConfig.matchMode);
|
||||
const legacyBehaviorNPlusOneMatchMode = asString(behavior.nPlusOneMatchMode);
|
||||
const hasValidKnownWordsMatchMode =
|
||||
knownWordsMatchMode === 'headword' || knownWordsMatchMode === 'surface';
|
||||
const hasValidLegacyNPlusOneMatchMode =
|
||||
legacyNPlusOneMatchMode === 'headword' || legacyNPlusOneMatchMode === 'surface';
|
||||
if (hasValidNPlusOneMatchMode) {
|
||||
context.resolved.ankiConnect.nPlusOne.matchMode = nPlusOneMatchMode;
|
||||
} else if (nPlusOneMatchMode !== undefined) {
|
||||
const hasValidLegacyMatchMode =
|
||||
legacyBehaviorNPlusOneMatchMode === 'headword' || legacyBehaviorNPlusOneMatchMode === 'surface';
|
||||
if (hasValidKnownWordsMatchMode) {
|
||||
context.resolved.ankiConnect.knownWords.matchMode = knownWordsMatchMode;
|
||||
} else if (knownWordsMatchMode !== undefined) {
|
||||
context.warn(
|
||||
'ankiConnect.nPlusOne.matchMode',
|
||||
nPlusOneConfig.matchMode,
|
||||
DEFAULT_CONFIG.ankiConnect.nPlusOne.matchMode,
|
||||
'ankiConnect.knownWords.matchMode',
|
||||
knownWordsConfig.matchMode,
|
||||
DEFAULT_CONFIG.ankiConnect.knownWords.matchMode,
|
||||
"Expected 'headword' or 'surface'.",
|
||||
);
|
||||
context.resolved.ankiConnect.nPlusOne.matchMode = DEFAULT_CONFIG.ankiConnect.nPlusOne.matchMode;
|
||||
context.resolved.ankiConnect.knownWords.matchMode =
|
||||
DEFAULT_CONFIG.ankiConnect.knownWords.matchMode;
|
||||
} else if (legacyNPlusOneMatchMode !== undefined) {
|
||||
if (hasValidLegacyNPlusOneMatchMode) {
|
||||
context.resolved.ankiConnect.knownWords.matchMode = legacyNPlusOneMatchMode;
|
||||
context.warn(
|
||||
'ankiConnect.nPlusOne.matchMode',
|
||||
nPlusOneConfig.matchMode,
|
||||
DEFAULT_CONFIG.ankiConnect.knownWords.matchMode,
|
||||
'Legacy key is deprecated; use ankiConnect.knownWords.matchMode',
|
||||
);
|
||||
} else {
|
||||
context.warn(
|
||||
'ankiConnect.nPlusOne.matchMode',
|
||||
nPlusOneConfig.matchMode,
|
||||
context.resolved.ankiConnect.knownWords.matchMode,
|
||||
"Expected 'headword' or 'surface'.",
|
||||
);
|
||||
context.resolved.ankiConnect.knownWords.matchMode =
|
||||
DEFAULT_CONFIG.ankiConnect.knownWords.matchMode;
|
||||
}
|
||||
} else if (legacyBehaviorNPlusOneMatchMode !== undefined) {
|
||||
if (hasValidLegacyMatchMode) {
|
||||
context.resolved.ankiConnect.nPlusOne.matchMode = legacyNPlusOneMatchMode;
|
||||
context.resolved.ankiConnect.knownWords.matchMode = legacyBehaviorNPlusOneMatchMode;
|
||||
context.warn(
|
||||
'ankiConnect.behavior.nPlusOneMatchMode',
|
||||
behavior.nPlusOneMatchMode,
|
||||
DEFAULT_CONFIG.ankiConnect.nPlusOne.matchMode,
|
||||
'Legacy key is deprecated; use ankiConnect.nPlusOne.matchMode',
|
||||
DEFAULT_CONFIG.ankiConnect.knownWords.matchMode,
|
||||
'Legacy key is deprecated; use ankiConnect.knownWords.matchMode',
|
||||
);
|
||||
} else {
|
||||
context.warn(
|
||||
'ankiConnect.behavior.nPlusOneMatchMode',
|
||||
behavior.nPlusOneMatchMode,
|
||||
context.resolved.ankiConnect.nPlusOne.matchMode,
|
||||
context.resolved.ankiConnect.knownWords.matchMode,
|
||||
"Expected 'headword' or 'surface'.",
|
||||
);
|
||||
context.resolved.ankiConnect.nPlusOne.matchMode =
|
||||
DEFAULT_CONFIG.ankiConnect.nPlusOne.matchMode;
|
||||
context.resolved.ankiConnect.knownWords.matchMode =
|
||||
DEFAULT_CONFIG.ankiConnect.knownWords.matchMode;
|
||||
}
|
||||
} else {
|
||||
context.resolved.ankiConnect.nPlusOne.matchMode = DEFAULT_CONFIG.ankiConnect.nPlusOne.matchMode;
|
||||
context.resolved.ankiConnect.knownWords.matchMode =
|
||||
DEFAULT_CONFIG.ankiConnect.knownWords.matchMode;
|
||||
}
|
||||
|
||||
const nPlusOneDecks = nPlusOneConfig.decks;
|
||||
if (Array.isArray(nPlusOneDecks)) {
|
||||
const normalizedDecks = nPlusOneDecks
|
||||
const DEFAULT_FIELDS = [
|
||||
DEFAULT_CONFIG.ankiConnect.fields.word,
|
||||
'Word',
|
||||
'Reading',
|
||||
'Word Reading',
|
||||
];
|
||||
const knownWordsDecks = knownWordsConfig.decks;
|
||||
const legacyNPlusOneDecks = nPlusOneConfig.decks;
|
||||
if (isObject(knownWordsDecks)) {
|
||||
const resolved: Record<string, string[]> = {};
|
||||
for (const [deck, fields] of Object.entries(knownWordsDecks as Record<string, unknown>)) {
|
||||
const deckName = deck.trim();
|
||||
if (!deckName) continue;
|
||||
if (Array.isArray(fields) && fields.every((f) => typeof f === 'string')) {
|
||||
resolved[deckName] = (fields as string[]).map((f) => f.trim()).filter((f) => f.length > 0);
|
||||
} else {
|
||||
context.warn(
|
||||
`ankiConnect.knownWords.decks["${deckName}"]`,
|
||||
fields,
|
||||
DEFAULT_FIELDS,
|
||||
'Expected an array of field name strings.',
|
||||
);
|
||||
resolved[deckName] = DEFAULT_FIELDS;
|
||||
}
|
||||
}
|
||||
context.resolved.ankiConnect.knownWords.decks = resolved;
|
||||
} else if (Array.isArray(knownWordsDecks)) {
|
||||
const normalized = knownWordsDecks
|
||||
.filter((entry): entry is string => typeof entry === 'string')
|
||||
.map((entry) => entry.trim())
|
||||
.filter((entry) => entry.length > 0);
|
||||
|
||||
if (normalizedDecks.length === nPlusOneDecks.length) {
|
||||
context.resolved.ankiConnect.nPlusOne.decks = [...new Set(normalizedDecks)];
|
||||
} else if (nPlusOneDecks.length > 0) {
|
||||
const resolved: Record<string, string[]> = {};
|
||||
for (const deck of new Set(normalized)) {
|
||||
resolved[deck] = DEFAULT_FIELDS;
|
||||
}
|
||||
context.resolved.ankiConnect.knownWords.decks = resolved;
|
||||
if (normalized.length > 0) {
|
||||
context.warn(
|
||||
'ankiConnect.knownWords.decks',
|
||||
knownWordsDecks,
|
||||
resolved,
|
||||
'Legacy array format is deprecated; use object format: { "Deck Name": ["Field1", "Field2"] }',
|
||||
);
|
||||
}
|
||||
} else if (knownWordsDecks !== undefined) {
|
||||
context.warn(
|
||||
'ankiConnect.knownWords.decks',
|
||||
knownWordsDecks,
|
||||
context.resolved.ankiConnect.knownWords.decks,
|
||||
'Expected an object mapping deck names to field arrays.',
|
||||
);
|
||||
} else if (Array.isArray(legacyNPlusOneDecks)) {
|
||||
const normalized = legacyNPlusOneDecks
|
||||
.filter((entry): entry is string => typeof entry === 'string')
|
||||
.map((entry) => entry.trim())
|
||||
.filter((entry) => entry.length > 0);
|
||||
const resolved: Record<string, string[]> = {};
|
||||
for (const deck of new Set(normalized)) {
|
||||
resolved[deck] = DEFAULT_FIELDS;
|
||||
}
|
||||
context.resolved.ankiConnect.knownWords.decks = resolved;
|
||||
if (normalized.length > 0) {
|
||||
context.warn(
|
||||
'ankiConnect.nPlusOne.decks',
|
||||
nPlusOneDecks,
|
||||
context.resolved.ankiConnect.nPlusOne.decks,
|
||||
'Expected an array of strings.',
|
||||
legacyNPlusOneDecks,
|
||||
DEFAULT_CONFIG.ankiConnect.knownWords.decks,
|
||||
'Legacy key is deprecated; use ankiConnect.knownWords.decks with object format',
|
||||
);
|
||||
} else {
|
||||
context.resolved.ankiConnect.nPlusOne.decks = [];
|
||||
}
|
||||
} else if (nPlusOneDecks !== undefined) {
|
||||
context.warn(
|
||||
'ankiConnect.nPlusOne.decks',
|
||||
nPlusOneDecks,
|
||||
context.resolved.ankiConnect.nPlusOne.decks,
|
||||
'Expected an array of strings.',
|
||||
);
|
||||
context.resolved.ankiConnect.nPlusOne.decks = [];
|
||||
}
|
||||
|
||||
const nPlusOneHighlightColor = asColor(nPlusOneConfig.nPlusOne);
|
||||
@@ -801,17 +959,34 @@ export function applyAnkiConnectResolution(context: ResolveContext): void {
|
||||
context.resolved.ankiConnect.nPlusOne.nPlusOne = DEFAULT_CONFIG.ankiConnect.nPlusOne.nPlusOne;
|
||||
}
|
||||
|
||||
const nPlusOneKnownWordColor = asColor(nPlusOneConfig.knownWord);
|
||||
if (nPlusOneKnownWordColor !== undefined) {
|
||||
context.resolved.ankiConnect.nPlusOne.knownWord = nPlusOneKnownWordColor;
|
||||
const knownWordsColor = asColor(knownWordsConfig.color);
|
||||
const legacyNPlusOneKnownWordColor = asColor(nPlusOneConfig.knownWord);
|
||||
if (knownWordsColor !== undefined) {
|
||||
context.resolved.ankiConnect.knownWords.color = knownWordsColor;
|
||||
} else if (knownWordsConfig.color !== undefined) {
|
||||
context.warn(
|
||||
'ankiConnect.knownWords.color',
|
||||
knownWordsConfig.color,
|
||||
context.resolved.ankiConnect.knownWords.color,
|
||||
'Expected a hex color value.',
|
||||
);
|
||||
context.resolved.ankiConnect.knownWords.color = DEFAULT_CONFIG.ankiConnect.knownWords.color;
|
||||
} else if (legacyNPlusOneKnownWordColor !== undefined) {
|
||||
context.resolved.ankiConnect.knownWords.color = legacyNPlusOneKnownWordColor;
|
||||
context.warn(
|
||||
'ankiConnect.nPlusOne.knownWord',
|
||||
nPlusOneConfig.knownWord,
|
||||
DEFAULT_CONFIG.ankiConnect.knownWords.color,
|
||||
'Legacy key is deprecated; use ankiConnect.knownWords.color',
|
||||
);
|
||||
} else if (nPlusOneConfig.knownWord !== undefined) {
|
||||
context.warn(
|
||||
'ankiConnect.nPlusOne.knownWord',
|
||||
nPlusOneConfig.knownWord,
|
||||
context.resolved.ankiConnect.nPlusOne.knownWord,
|
||||
context.resolved.ankiConnect.knownWords.color,
|
||||
'Expected a hex color value.',
|
||||
);
|
||||
context.resolved.ankiConnect.nPlusOne.knownWord = DEFAULT_CONFIG.ankiConnect.nPlusOne.knownWord;
|
||||
context.resolved.ankiConnect.knownWords.color = DEFAULT_CONFIG.ankiConnect.knownWords.color;
|
||||
}
|
||||
|
||||
if (
|
||||
|
||||
@@ -1,9 +1,68 @@
|
||||
import { ResolveContext } from './context';
|
||||
import { ImmersionTrackingRetentionMode, ImmersionTrackingRetentionPreset } from '../../types';
|
||||
import { asBoolean, asNumber, asString, isObject } from './shared';
|
||||
|
||||
const DEFAULT_RETENTION_MODE: ImmersionTrackingRetentionMode = 'preset';
|
||||
const DEFAULT_RETENTION_PRESET: ImmersionTrackingRetentionPreset = 'balanced';
|
||||
|
||||
const BASE_RETENTION = {
|
||||
eventsDays: 0,
|
||||
telemetryDays: 0,
|
||||
sessionsDays: 0,
|
||||
dailyRollupsDays: 0,
|
||||
monthlyRollupsDays: 0,
|
||||
vacuumIntervalDays: 0,
|
||||
};
|
||||
|
||||
const RETENTION_PRESETS: Record<ImmersionTrackingRetentionPreset, typeof BASE_RETENTION> = {
|
||||
minimal: {
|
||||
eventsDays: 3,
|
||||
telemetryDays: 14,
|
||||
sessionsDays: 14,
|
||||
dailyRollupsDays: 30,
|
||||
monthlyRollupsDays: 365,
|
||||
vacuumIntervalDays: 7,
|
||||
},
|
||||
balanced: BASE_RETENTION,
|
||||
'deep-history': {
|
||||
eventsDays: 14,
|
||||
telemetryDays: 60,
|
||||
sessionsDays: 60,
|
||||
dailyRollupsDays: 730,
|
||||
monthlyRollupsDays: 5 * 365,
|
||||
vacuumIntervalDays: 7,
|
||||
},
|
||||
};
|
||||
|
||||
const DEFAULT_LIFETIME_SUMMARIES = {
|
||||
global: true,
|
||||
anime: true,
|
||||
media: true,
|
||||
};
|
||||
|
||||
function asRetentionMode(value: unknown): value is ImmersionTrackingRetentionMode {
|
||||
return value === 'preset' || value === 'advanced';
|
||||
}
|
||||
|
||||
function asRetentionPreset(value: unknown): value is ImmersionTrackingRetentionPreset {
|
||||
return value === 'minimal' || value === 'balanced' || value === 'deep-history';
|
||||
}
|
||||
|
||||
export function applyImmersionTrackingConfig(context: ResolveContext): void {
|
||||
const { src, resolved, warn } = context;
|
||||
|
||||
if (!isObject(src.immersionTracking)) {
|
||||
resolved.immersionTracking.retentionMode = DEFAULT_RETENTION_MODE;
|
||||
resolved.immersionTracking.retentionPreset = DEFAULT_RETENTION_PRESET;
|
||||
resolved.immersionTracking.retention = {
|
||||
...BASE_RETENTION,
|
||||
};
|
||||
resolved.immersionTracking.lifetimeSummaries = {
|
||||
...DEFAULT_LIFETIME_SUMMARIES,
|
||||
};
|
||||
return;
|
||||
}
|
||||
|
||||
if (isObject(src.immersionTracking)) {
|
||||
const enabled = asBoolean(src.immersionTracking.enabled);
|
||||
if (enabled !== undefined) {
|
||||
@@ -93,81 +152,186 @@ export function applyImmersionTrackingConfig(context: ResolveContext): void {
|
||||
);
|
||||
}
|
||||
|
||||
const retentionMode = asString(src.immersionTracking.retentionMode);
|
||||
if (asRetentionMode(retentionMode)) {
|
||||
resolved.immersionTracking.retentionMode = retentionMode;
|
||||
} else if (src.immersionTracking.retentionMode !== undefined) {
|
||||
warn(
|
||||
'immersionTracking.retentionMode',
|
||||
src.immersionTracking.retentionMode,
|
||||
DEFAULT_RETENTION_MODE,
|
||||
'Expected "preset" or "advanced".',
|
||||
);
|
||||
resolved.immersionTracking.retentionMode = DEFAULT_RETENTION_MODE;
|
||||
} else {
|
||||
resolved.immersionTracking.retentionMode = DEFAULT_RETENTION_MODE;
|
||||
}
|
||||
|
||||
const retentionPreset = asString(src.immersionTracking.retentionPreset);
|
||||
if (asRetentionPreset(retentionPreset)) {
|
||||
resolved.immersionTracking.retentionPreset = retentionPreset;
|
||||
} else if (src.immersionTracking.retentionPreset !== undefined) {
|
||||
warn(
|
||||
'immersionTracking.retentionPreset',
|
||||
src.immersionTracking.retentionPreset,
|
||||
DEFAULT_RETENTION_PRESET,
|
||||
'Expected "minimal", "balanced", or "deep-history".',
|
||||
);
|
||||
resolved.immersionTracking.retentionPreset = DEFAULT_RETENTION_PRESET;
|
||||
} else {
|
||||
resolved.immersionTracking.retentionPreset =
|
||||
resolved.immersionTracking.retentionPreset ?? DEFAULT_RETENTION_PRESET;
|
||||
}
|
||||
|
||||
const resolvedPreset =
|
||||
resolved.immersionTracking.retentionPreset === 'minimal' ||
|
||||
resolved.immersionTracking.retentionPreset === 'balanced' ||
|
||||
resolved.immersionTracking.retentionPreset === 'deep-history'
|
||||
? resolved.immersionTracking.retentionPreset
|
||||
: DEFAULT_RETENTION_PRESET;
|
||||
|
||||
const baseRetention =
|
||||
resolved.immersionTracking.retentionMode === 'preset'
|
||||
? RETENTION_PRESETS[resolvedPreset]
|
||||
: BASE_RETENTION;
|
||||
|
||||
const retention = {
|
||||
eventsDays: baseRetention.eventsDays,
|
||||
telemetryDays: baseRetention.telemetryDays,
|
||||
sessionsDays: baseRetention.sessionsDays,
|
||||
dailyRollupsDays: baseRetention.dailyRollupsDays,
|
||||
monthlyRollupsDays: baseRetention.monthlyRollupsDays,
|
||||
vacuumIntervalDays: baseRetention.vacuumIntervalDays,
|
||||
};
|
||||
|
||||
if (isObject(src.immersionTracking.retention)) {
|
||||
const eventsDays = asNumber(src.immersionTracking.retention.eventsDays);
|
||||
if (eventsDays !== undefined && eventsDays >= 1 && eventsDays <= 3650) {
|
||||
resolved.immersionTracking.retention.eventsDays = Math.floor(eventsDays);
|
||||
if (eventsDays !== undefined && eventsDays >= 0 && eventsDays <= 3650) {
|
||||
retention.eventsDays = Math.floor(eventsDays);
|
||||
} else if (src.immersionTracking.retention.eventsDays !== undefined) {
|
||||
warn(
|
||||
'immersionTracking.retention.eventsDays',
|
||||
src.immersionTracking.retention.eventsDays,
|
||||
resolved.immersionTracking.retention.eventsDays,
|
||||
'Expected integer between 1 and 3650.',
|
||||
retention.eventsDays,
|
||||
'Expected integer between 0 and 3650.',
|
||||
);
|
||||
}
|
||||
|
||||
const telemetryDays = asNumber(src.immersionTracking.retention.telemetryDays);
|
||||
if (telemetryDays !== undefined && telemetryDays >= 1 && telemetryDays <= 3650) {
|
||||
resolved.immersionTracking.retention.telemetryDays = Math.floor(telemetryDays);
|
||||
if (telemetryDays !== undefined && telemetryDays >= 0 && telemetryDays <= 3650) {
|
||||
retention.telemetryDays = Math.floor(telemetryDays);
|
||||
} else if (src.immersionTracking.retention.telemetryDays !== undefined) {
|
||||
warn(
|
||||
'immersionTracking.retention.telemetryDays',
|
||||
src.immersionTracking.retention.telemetryDays,
|
||||
resolved.immersionTracking.retention.telemetryDays,
|
||||
'Expected integer between 1 and 3650.',
|
||||
retention.telemetryDays,
|
||||
'Expected integer between 0 and 3650.',
|
||||
);
|
||||
}
|
||||
|
||||
const sessionsDays = asNumber(src.immersionTracking.retention.sessionsDays);
|
||||
if (sessionsDays !== undefined && sessionsDays >= 0 && sessionsDays <= 3650) {
|
||||
retention.sessionsDays = Math.floor(sessionsDays);
|
||||
} else if (src.immersionTracking.retention.sessionsDays !== undefined) {
|
||||
warn(
|
||||
'immersionTracking.retention.sessionsDays',
|
||||
src.immersionTracking.retention.sessionsDays,
|
||||
retention.sessionsDays,
|
||||
'Expected integer between 0 and 3650.',
|
||||
);
|
||||
}
|
||||
|
||||
const dailyRollupsDays = asNumber(src.immersionTracking.retention.dailyRollupsDays);
|
||||
if (dailyRollupsDays !== undefined && dailyRollupsDays >= 1 && dailyRollupsDays <= 36500) {
|
||||
resolved.immersionTracking.retention.dailyRollupsDays = Math.floor(dailyRollupsDays);
|
||||
if (dailyRollupsDays !== undefined && dailyRollupsDays >= 0 && dailyRollupsDays <= 36500) {
|
||||
retention.dailyRollupsDays = Math.floor(dailyRollupsDays);
|
||||
} else if (src.immersionTracking.retention.dailyRollupsDays !== undefined) {
|
||||
warn(
|
||||
'immersionTracking.retention.dailyRollupsDays',
|
||||
src.immersionTracking.retention.dailyRollupsDays,
|
||||
resolved.immersionTracking.retention.dailyRollupsDays,
|
||||
'Expected integer between 1 and 36500.',
|
||||
retention.dailyRollupsDays,
|
||||
'Expected integer between 0 and 36500.',
|
||||
);
|
||||
}
|
||||
|
||||
const monthlyRollupsDays = asNumber(src.immersionTracking.retention.monthlyRollupsDays);
|
||||
if (
|
||||
monthlyRollupsDays !== undefined &&
|
||||
monthlyRollupsDays >= 1 &&
|
||||
monthlyRollupsDays >= 0 &&
|
||||
monthlyRollupsDays <= 36500
|
||||
) {
|
||||
resolved.immersionTracking.retention.monthlyRollupsDays = Math.floor(monthlyRollupsDays);
|
||||
retention.monthlyRollupsDays = Math.floor(monthlyRollupsDays);
|
||||
} else if (src.immersionTracking.retention.monthlyRollupsDays !== undefined) {
|
||||
warn(
|
||||
'immersionTracking.retention.monthlyRollupsDays',
|
||||
src.immersionTracking.retention.monthlyRollupsDays,
|
||||
resolved.immersionTracking.retention.monthlyRollupsDays,
|
||||
'Expected integer between 1 and 36500.',
|
||||
retention.monthlyRollupsDays,
|
||||
'Expected integer between 0 and 36500.',
|
||||
);
|
||||
}
|
||||
|
||||
const vacuumIntervalDays = asNumber(src.immersionTracking.retention.vacuumIntervalDays);
|
||||
if (
|
||||
vacuumIntervalDays !== undefined &&
|
||||
vacuumIntervalDays >= 1 &&
|
||||
vacuumIntervalDays >= 0 &&
|
||||
vacuumIntervalDays <= 3650
|
||||
) {
|
||||
resolved.immersionTracking.retention.vacuumIntervalDays = Math.floor(vacuumIntervalDays);
|
||||
retention.vacuumIntervalDays = Math.floor(vacuumIntervalDays);
|
||||
} else if (src.immersionTracking.retention.vacuumIntervalDays !== undefined) {
|
||||
warn(
|
||||
'immersionTracking.retention.vacuumIntervalDays',
|
||||
src.immersionTracking.retention.vacuumIntervalDays,
|
||||
resolved.immersionTracking.retention.vacuumIntervalDays,
|
||||
'Expected integer between 1 and 3650.',
|
||||
retention.vacuumIntervalDays,
|
||||
'Expected integer between 0 and 3650.',
|
||||
);
|
||||
}
|
||||
} else if (src.immersionTracking.retention !== undefined) {
|
||||
warn(
|
||||
'immersionTracking.retention',
|
||||
src.immersionTracking.retention,
|
||||
resolved.immersionTracking.retention,
|
||||
baseRetention,
|
||||
'Expected object.',
|
||||
);
|
||||
}
|
||||
|
||||
resolved.immersionTracking.retention = {
|
||||
eventsDays: retention.eventsDays,
|
||||
telemetryDays: retention.telemetryDays,
|
||||
sessionsDays: retention.sessionsDays,
|
||||
dailyRollupsDays: retention.dailyRollupsDays,
|
||||
monthlyRollupsDays: retention.monthlyRollupsDays,
|
||||
vacuumIntervalDays: retention.vacuumIntervalDays,
|
||||
};
|
||||
|
||||
const lifetimeSummaries = {
|
||||
global: DEFAULT_LIFETIME_SUMMARIES.global,
|
||||
anime: DEFAULT_LIFETIME_SUMMARIES.anime,
|
||||
media: DEFAULT_LIFETIME_SUMMARIES.media,
|
||||
};
|
||||
|
||||
if (isObject(src.immersionTracking.lifetimeSummaries)) {
|
||||
const global = asBoolean(src.immersionTracking.lifetimeSummaries.global);
|
||||
if (global !== undefined) {
|
||||
lifetimeSummaries.global = global;
|
||||
}
|
||||
|
||||
const anime = asBoolean(src.immersionTracking.lifetimeSummaries.anime);
|
||||
if (anime !== undefined) {
|
||||
lifetimeSummaries.anime = anime;
|
||||
}
|
||||
|
||||
const media = asBoolean(src.immersionTracking.lifetimeSummaries.media);
|
||||
if (media !== undefined) {
|
||||
lifetimeSummaries.media = media;
|
||||
}
|
||||
} else if (src.immersionTracking.lifetimeSummaries !== undefined) {
|
||||
warn(
|
||||
'immersionTracking.lifetimeSummaries',
|
||||
src.immersionTracking.lifetimeSummaries,
|
||||
DEFAULT_LIFETIME_SUMMARIES,
|
||||
'Expected object.',
|
||||
);
|
||||
}
|
||||
|
||||
resolved.immersionTracking.lifetimeSummaries = lifetimeSummaries;
|
||||
}
|
||||
}
|
||||
|
||||
53
src/config/resolve/stats.ts
Normal file
53
src/config/resolve/stats.ts
Normal file
@@ -0,0 +1,53 @@
|
||||
import { ResolveContext } from './context';
|
||||
import { asBoolean, asNumber, asString, isObject } from './shared';
|
||||
|
||||
export function applyStatsConfig(context: ResolveContext): void {
|
||||
const { src, resolved, warn } = context;
|
||||
|
||||
if (!isObject(src.stats)) return;
|
||||
|
||||
const toggleKey = asString(src.stats.toggleKey);
|
||||
if (toggleKey !== undefined) {
|
||||
resolved.stats.toggleKey = toggleKey;
|
||||
} else if (src.stats.toggleKey !== undefined) {
|
||||
warn('stats.toggleKey', src.stats.toggleKey, resolved.stats.toggleKey, 'Expected string.');
|
||||
}
|
||||
|
||||
const markWatchedKey = asString(src.stats.markWatchedKey);
|
||||
if (markWatchedKey !== undefined) {
|
||||
resolved.stats.markWatchedKey = markWatchedKey;
|
||||
} else if (src.stats.markWatchedKey !== undefined) {
|
||||
warn('stats.markWatchedKey', src.stats.markWatchedKey, resolved.stats.markWatchedKey, 'Expected string.');
|
||||
}
|
||||
|
||||
const serverPort = asNumber(src.stats.serverPort);
|
||||
if (serverPort !== undefined) {
|
||||
resolved.stats.serverPort = serverPort;
|
||||
} else if (src.stats.serverPort !== undefined) {
|
||||
warn('stats.serverPort', src.stats.serverPort, resolved.stats.serverPort, 'Expected number.');
|
||||
}
|
||||
|
||||
const autoStartServer = asBoolean(src.stats.autoStartServer);
|
||||
if (autoStartServer !== undefined) {
|
||||
resolved.stats.autoStartServer = autoStartServer;
|
||||
} else if (src.stats.autoStartServer !== undefined) {
|
||||
warn(
|
||||
'stats.autoStartServer',
|
||||
src.stats.autoStartServer,
|
||||
resolved.stats.autoStartServer,
|
||||
'Expected boolean.',
|
||||
);
|
||||
}
|
||||
|
||||
const autoOpenBrowser = asBoolean(src.stats.autoOpenBrowser);
|
||||
if (autoOpenBrowser !== undefined) {
|
||||
resolved.stats.autoOpenBrowser = autoOpenBrowser;
|
||||
} else if (src.stats.autoOpenBrowser !== undefined) {
|
||||
warn(
|
||||
'stats.autoOpenBrowser',
|
||||
src.stats.autoOpenBrowser,
|
||||
resolved.stats.autoOpenBrowser,
|
||||
'Expected boolean.',
|
||||
);
|
||||
}
|
||||
}
|
||||
1113
src/core/services/__tests__/stats-server.test.ts
Normal file
1113
src/core/services/__tests__/stats-server.test.ts
Normal file
File diff suppressed because it is too large
Load Diff
@@ -16,6 +16,7 @@ test('guessAnilistMediaInfo uses guessit output when available', async () => {
|
||||
});
|
||||
assert.deepEqual(result, {
|
||||
title: 'Guessit Title',
|
||||
season: null,
|
||||
episode: 7,
|
||||
source: 'guessit',
|
||||
});
|
||||
@@ -29,6 +30,7 @@ test('guessAnilistMediaInfo falls back to parser when guessit fails', async () =
|
||||
});
|
||||
assert.deepEqual(result, {
|
||||
title: 'My Anime',
|
||||
season: 1,
|
||||
episode: 3,
|
||||
source: 'fallback',
|
||||
});
|
||||
@@ -52,6 +54,7 @@ test('guessAnilistMediaInfo uses basename for guessit input', async () => {
|
||||
]);
|
||||
assert.deepEqual(result, {
|
||||
title: 'Rascal Does Not Dream of Bunny Girl Senpai',
|
||||
season: null,
|
||||
episode: 1,
|
||||
source: 'guessit',
|
||||
});
|
||||
@@ -67,6 +70,7 @@ test('guessAnilistMediaInfo joins multi-part guessit titles', async () => {
|
||||
});
|
||||
assert.deepEqual(result, {
|
||||
title: 'Rascal Does not Dream of Bunny Girl Senpai',
|
||||
season: null,
|
||||
episode: 1,
|
||||
source: 'guessit',
|
||||
});
|
||||
|
||||
@@ -7,6 +7,7 @@ const ANILIST_GRAPHQL_URL = 'https://graphql.anilist.co';
|
||||
|
||||
export interface AnilistMediaGuess {
|
||||
title: string;
|
||||
season: number | null;
|
||||
episode: number | null;
|
||||
source: 'guessit' | 'fallback';
|
||||
}
|
||||
@@ -56,7 +57,7 @@ interface AnilistSaveEntryData {
|
||||
};
|
||||
}
|
||||
|
||||
function runGuessit(target: string): Promise<string> {
|
||||
export function runGuessit(target: string): Promise<string> {
|
||||
return new Promise((resolve, reject) => {
|
||||
childProcess.execFile(
|
||||
'guessit',
|
||||
@@ -73,9 +74,9 @@ function runGuessit(target: string): Promise<string> {
|
||||
});
|
||||
}
|
||||
|
||||
type GuessAnilistMediaInfoDeps = {
|
||||
export interface GuessAnilistMediaInfoDeps {
|
||||
runGuessit: (target: string) => Promise<string>;
|
||||
};
|
||||
}
|
||||
|
||||
function firstString(value: unknown): string | null {
|
||||
if (typeof value === 'string') {
|
||||
@@ -215,8 +216,9 @@ export async function guessAnilistMediaInfo(
|
||||
const parsed = JSON.parse(stdout) as Record<string, unknown>;
|
||||
const title = readGuessitTitle(parsed.title);
|
||||
const episode = firstPositiveInteger(parsed.episode);
|
||||
const season = firstPositiveInteger(parsed.season);
|
||||
if (title) {
|
||||
return { title, episode, source: 'guessit' };
|
||||
return { title, season, episode, source: 'guessit' };
|
||||
}
|
||||
} catch {
|
||||
// Ignore guessit failures and fall back to internal parser.
|
||||
@@ -230,6 +232,7 @@ export async function guessAnilistMediaInfo(
|
||||
}
|
||||
return {
|
||||
title: parsed.title.trim(),
|
||||
season: parsed.season,
|
||||
episode: parsed.episode,
|
||||
source: 'fallback',
|
||||
};
|
||||
|
||||
244
src/core/services/anilist/cover-art-fetcher.test.ts
Normal file
244
src/core/services/anilist/cover-art-fetcher.test.ts
Normal file
@@ -0,0 +1,244 @@
|
||||
import assert from 'node:assert/strict';
|
||||
import fs from 'node:fs';
|
||||
import os from 'node:os';
|
||||
import path from 'node:path';
|
||||
import test from 'node:test';
|
||||
import { createCoverArtFetcher, stripFilenameTags } from './cover-art-fetcher.js';
|
||||
import { Database } from '../immersion-tracker/sqlite.js';
|
||||
import { ensureSchema, getOrCreateVideoRecord } from '../immersion-tracker/storage.js';
|
||||
import { getCoverArt, upsertCoverArt } from '../immersion-tracker/query.js';
|
||||
import { SOURCE_TYPE_LOCAL } from '../immersion-tracker/types.js';
|
||||
|
||||
function makeDbPath(): string {
|
||||
const dir = fs.mkdtempSync(path.join(os.tmpdir(), 'subminer-cover-art-test-'));
|
||||
return path.join(dir, 'immersion.sqlite');
|
||||
}
|
||||
|
||||
function cleanupDbPath(dbPath: string): void {
|
||||
fs.rmSync(path.dirname(dbPath), { recursive: true, force: true });
|
||||
}
|
||||
|
||||
test('stripFilenameTags normalizes common media-title formats', () => {
|
||||
assert.equal(
|
||||
stripFilenameTags('[Jellyfin/direct] The Eminence in Shadow S01E05 I Am...'),
|
||||
'The Eminence in Shadow',
|
||||
);
|
||||
assert.equal(
|
||||
stripFilenameTags(
|
||||
'[Foxtrot] Kono Subarashii Sekai ni Shukufuku wo! S2 - 05: Servitude for this Masked Knight!',
|
||||
),
|
||||
'Kono Subarashii Sekai ni Shukufuku wo!',
|
||||
);
|
||||
assert.equal(
|
||||
stripFilenameTags('Kono Subarashii Sekai ni Shukufuku wo! E03: A Panty Treasure'),
|
||||
'Kono Subarashii Sekai ni Shukufuku wo!',
|
||||
);
|
||||
assert.equal(
|
||||
stripFilenameTags(
|
||||
'Little Witch Academia (2017) - S01E05 - 005 - Pact of the Dragon [Bluray-1080p][10bit][h265][FLAC 2.0][JA]-FumeiRaws.mkv',
|
||||
),
|
||||
'Little Witch Academia',
|
||||
);
|
||||
});
|
||||
|
||||
test('fetchIfMissing backfills a missing blob from an existing cover URL', async () => {
|
||||
const dbPath = makeDbPath();
|
||||
const db = new Database(dbPath);
|
||||
ensureSchema(db);
|
||||
const videoId = getOrCreateVideoRecord(db, 'local:/tmp/cover-fetcher-test.mkv', {
|
||||
canonicalTitle: 'Cover Fetcher Test',
|
||||
sourcePath: '/tmp/cover-fetcher-test.mkv',
|
||||
sourceUrl: null,
|
||||
sourceType: SOURCE_TYPE_LOCAL,
|
||||
});
|
||||
upsertCoverArt(db, videoId, {
|
||||
anilistId: 7,
|
||||
coverUrl: 'https://images.test/cover.jpg',
|
||||
coverBlob: null,
|
||||
titleRomaji: 'Test Title',
|
||||
titleEnglish: 'Test Title',
|
||||
episodesTotal: 12,
|
||||
});
|
||||
|
||||
const fetchCalls: string[] = [];
|
||||
const originalFetch = globalThis.fetch;
|
||||
globalThis.fetch = (async (input: RequestInfo | URL) => {
|
||||
const url = String(input);
|
||||
fetchCalls.push(url);
|
||||
assert.equal(url, 'https://images.test/cover.jpg');
|
||||
return new Response(new Uint8Array([1, 2, 3, 4]), {
|
||||
status: 200,
|
||||
headers: { 'Content-Type': 'image/jpeg' },
|
||||
});
|
||||
}) as typeof fetch;
|
||||
|
||||
try {
|
||||
const fetcher = createCoverArtFetcher(
|
||||
{
|
||||
acquire: async () => {},
|
||||
recordResponse: () => {},
|
||||
},
|
||||
console,
|
||||
);
|
||||
|
||||
const fetched = await fetcher.fetchIfMissing(
|
||||
db,
|
||||
videoId,
|
||||
'[Jellyfin] Little Witch Academia S02E05 - 025 - Pact of the Dragon (2020) [1080p].mkv',
|
||||
);
|
||||
const stored = getCoverArt(db, videoId);
|
||||
|
||||
assert.equal(fetched, true);
|
||||
assert.equal(fetchCalls.length, 1);
|
||||
assert.equal(stored?.coverBlob?.length, 4);
|
||||
assert.equal(stored?.titleEnglish, 'Test Title');
|
||||
} finally {
|
||||
globalThis.fetch = originalFetch;
|
||||
db.close();
|
||||
cleanupDbPath(dbPath);
|
||||
}
|
||||
});
|
||||
|
||||
function createJsonResponse(payload: unknown): Response {
|
||||
return new Response(JSON.stringify(payload), {
|
||||
status: 200,
|
||||
headers: { 'content-type': 'application/json' },
|
||||
});
|
||||
}
|
||||
|
||||
test('fetchIfMissing uses guessit primary title and season when available', async () => {
|
||||
const dbPath = makeDbPath();
|
||||
const db = new Database(dbPath);
|
||||
ensureSchema(db);
|
||||
const videoId = getOrCreateVideoRecord(db, 'local:/tmp/cover-fetcher-season-test.mkv', {
|
||||
canonicalTitle:
|
||||
'[Jellyfin] Little Witch Academia S02E05 - 025 - Pact of the Dragon (2020) [1080p].mkv',
|
||||
sourcePath: '/tmp/cover-fetcher-season-test.mkv',
|
||||
sourceUrl: null,
|
||||
sourceType: SOURCE_TYPE_LOCAL,
|
||||
});
|
||||
|
||||
const searchCalls: Array<{ search: string }> = [];
|
||||
const originalFetch = globalThis.fetch;
|
||||
globalThis.fetch = ((input: RequestInfo | URL, init?: RequestInit) => {
|
||||
const raw = (init?.body as string | undefined) ?? '';
|
||||
const payload = JSON.parse(raw) as { variables: { search: string } };
|
||||
const search = payload.variables.search;
|
||||
searchCalls.push({ search });
|
||||
|
||||
if (search.includes('Season 2')) {
|
||||
return Promise.resolve(createJsonResponse({ data: { Page: { media: [] } } }));
|
||||
}
|
||||
|
||||
return Promise.resolve(
|
||||
createJsonResponse({
|
||||
data: {
|
||||
Page: {
|
||||
media: [
|
||||
{
|
||||
id: 19,
|
||||
episodes: 24,
|
||||
coverImage: { large: 'https://images.test/cover.jpg', medium: null },
|
||||
title: {
|
||||
romaji: 'Little Witch Academia',
|
||||
english: 'Little Witch Academia',
|
||||
native: null,
|
||||
},
|
||||
},
|
||||
],
|
||||
},
|
||||
},
|
||||
}),
|
||||
);
|
||||
}) as typeof fetch;
|
||||
|
||||
try {
|
||||
const fetcher = createCoverArtFetcher(
|
||||
{
|
||||
acquire: async () => {},
|
||||
recordResponse: () => {},
|
||||
},
|
||||
console,
|
||||
{
|
||||
runGuessit: async () =>
|
||||
JSON.stringify({ title: 'Little Witch Academia', season: 2, episode: 5 }),
|
||||
},
|
||||
);
|
||||
|
||||
const fetched = await fetcher.fetchIfMissing(db, videoId, 'School Vlog S01E01');
|
||||
const stored = getCoverArt(db, videoId);
|
||||
|
||||
assert.equal(fetched, true);
|
||||
assert.equal(searchCalls.length, 2);
|
||||
assert.equal(searchCalls[0]!.search, 'Little Witch Academia Season 2');
|
||||
assert.equal(stored?.anilistId, 19);
|
||||
} finally {
|
||||
globalThis.fetch = originalFetch;
|
||||
db.close();
|
||||
cleanupDbPath(dbPath);
|
||||
}
|
||||
});
|
||||
|
||||
// Verifies the guessit failure path: when the external guessit helper throws,
// the fetcher must fall back to its internal filename parser and still resolve
// cover art from Anilist.
test('fetchIfMissing falls back to internal parser when guessit throws', async () => {
  const dbPath = makeDbPath();
  const db = new Database(dbPath);
  ensureSchema(db);
  // Seed a video row whose canonical title carries a parseable S01E01 marker.
  const videoId = getOrCreateVideoRecord(db, 'local:/tmp/cover-fetcher-fallback-test.mkv', {
    canonicalTitle: 'School Vlog S01E01',
    sourcePath: '/tmp/cover-fetcher-fallback-test.mkv',
    sourceUrl: null,
    sourceType: SOURCE_TYPE_LOCAL,
  });

  let requestCount = 0;
  const originalFetch = globalThis.fetch;
  // Stub all outbound fetches. For the GraphQL search this asserts the
  // fallback parser produced the tag-stripped search term and returns one
  // match; the later cover-image download also lands here (its empty body
  // makes JSON.parse throw, which downloadImage is expected to swallow).
  globalThis.fetch = ((input: RequestInfo | URL, init?: RequestInit) => {
    requestCount += 1;
    const raw = (init?.body as string | undefined) ?? '';
    const payload = JSON.parse(raw) as { variables: { search: string } };
    assert.equal(payload.variables.search, 'School Vlog');

    return Promise.resolve(
      createJsonResponse({
        data: {
          Page: {
            media: [
              {
                id: 21,
                episodes: 12,
                coverImage: { large: 'https://images.test/fallback-cover.jpg', medium: null },
                title: { romaji: 'School Vlog', english: 'School Vlog', native: null },
              },
            ],
          },
        },
      }),
    );
  }) as typeof fetch;

  try {
    const fetcher = createCoverArtFetcher(
      {
        // No-op rate limiter so the test never sleeps.
        acquire: async () => {},
        recordResponse: () => {},
      },
      console,
      {
        // Force the guessit path to fail so the internal parser is exercised.
        runGuessit: async () => {
          throw new Error('guessit unavailable');
        },
      },
    );

    // The canonical title stored in the DB wins over this argument.
    const fetched = await fetcher.fetchIfMissing(db, videoId, 'Ignored Title');
    const stored = getCoverArt(db, videoId);

    assert.equal(fetched, true);
    // Two fetch calls: the Anilist search plus the attempted cover download.
    assert.equal(requestCount, 2);
    assert.equal(stored?.anilistId, 21);
  } finally {
    // Always restore the global fetch and clean up the temp database.
    globalThis.fetch = originalFetch;
    db.close();
    cleanupDbPath(dbPath);
  }
});
|
||||
435
src/core/services/anilist/cover-art-fetcher.ts
Normal file
435
src/core/services/anilist/cover-art-fetcher.ts
Normal file
@@ -0,0 +1,435 @@
|
||||
import type { AnilistRateLimiter } from './rate-limiter';
|
||||
import type { DatabaseSync } from '../immersion-tracker/sqlite';
|
||||
import { getCoverArt, upsertCoverArt, updateAnimeAnilistInfo } from '../immersion-tracker/query';
|
||||
import {
|
||||
guessAnilistMediaInfo,
|
||||
runGuessit,
|
||||
type GuessAnilistMediaInfoDeps,
|
||||
} from './anilist-updater';
|
||||
|
||||
// Anilist public GraphQL endpoint used by searchAnilist below.
const ANILIST_GRAPHQL_URL = 'https://graphql.anilist.co';
// How long a cached "no match" row suppresses re-querying Anilist (5 minutes).
const NO_MATCH_RETRY_MS = 5 * 60 * 1000;

// Search query: top 5 anime matches with enough metadata to rank results
// (episode count, season hints) and to cache cover art plus titles.
const SEARCH_QUERY = `
query ($search: String!) {
  Page(perPage: 5) {
    media(search: $search, type: ANIME) {
      id
      episodes
      season
      seasonYear
      coverImage { large medium }
      title { romaji english native }
    }
  }
}
`;
|
||||
|
||||
// Shape of one media entry returned by SEARCH_QUERY.
interface AnilistMedia {
  id: number;
  // Total episode count; null when Anilist does not know it (e.g. airing).
  episodes: number | null;
  // Season label as reported by Anilist, when provided.
  season: string | null;
  seasonYear: number | null;
  coverImage: { large: string | null; medium: string | null } | null;
  title: { romaji: string | null; english: string | null; native: string | null } | null;
}

// Envelope of the GraphQL search response (data on success, errors otherwise).
interface AnilistSearchResponse {
  data?: {
    Page?: {
      media?: AnilistMedia[];
    };
  };
  errors?: Array<{ message?: string }>;
}

// Public surface: fetch and cache cover art for a video when not already cached.
// Resolves true when art (or an existing cached blob) is available afterwards.
export interface CoverArtFetcher {
  fetchIfMissing(db: DatabaseSync, videoId: number, canonicalTitle: string): Promise<boolean>;
}

// Minimal console-compatible logging surface.
interface Logger {
  info(msg: string, ...args: unknown[]): void;
  warn(msg: string, ...args: unknown[]): void;
  error(msg: string, ...args: unknown[]): void;
}

// Parsed media info used to build Anilist search terms.
interface CoverArtCandidate {
  title: string;
  // Whether the parse came from the external guessit helper or the internal
  // fallback parser; only guessit-derived season numbers are trusted for
  // "Season N" search suffixes.
  source: 'guessit' | 'fallback';
  season: number | null;
  episode: number | null;
}

// Optional dependency injection, primarily for tests.
interface CoverArtFetcherOptions {
  runGuessit?: GuessAnilistMediaInfoDeps['runGuessit'];
}
|
||||
|
||||
export function stripFilenameTags(raw: string): string {
|
||||
let title = raw.replace(/\.[A-Za-z0-9]{2,4}$/, '');
|
||||
|
||||
title = title.replace(/^(?:\s*\[[^\]]*\]\s*)+/, '');
|
||||
title = title.replace(/[._]+/g, ' ');
|
||||
|
||||
// Remove everything from " - S##E##" or " - ###" onward (season/episode markers)
|
||||
title = title.replace(/\s+-\s+S\d+E\d+.*$/i, '');
|
||||
title = title.replace(/\s+-\s+\d{2,}(\s+-\s+\d+)?(\s+-.+)?$/, '');
|
||||
title = title.replace(/\s+S\d+E\d+.*$/i, '');
|
||||
title = title.replace(/\s+S\d+\s*[- ]\s*\d+[: -].*$/i, '');
|
||||
title = title.replace(/\s+E\d+[: -].*$/i, '');
|
||||
title = title.replace(/^S\d+E\d+\s*[- ]\s*/i, '');
|
||||
|
||||
// Remove bracketed/parenthesized tags: [WEBDL-1080p], (2022), etc.
|
||||
title = title.replace(/\s*\[[^\]]*\]\s*/g, ' ');
|
||||
title = title.replace(/\s*\([^)]*\d{4}[^)]*\)\s*/g, ' ');
|
||||
|
||||
// Remove common codec/source tags that may appear without brackets
|
||||
title = title.replace(
|
||||
/\b(WEBDL|WEBRip|BluRay|BDRip|HDTV|DVDRip|x264|x265|H\.?264|H\.?265|AV1|AAC|FLAC|Opus|10bit|8bit|1080p|720p|480p|2160p|4K)\b[-.\w]*/gi,
|
||||
'',
|
||||
);
|
||||
|
||||
// Remove trailing dashes and group tags like "-Retr0"
|
||||
title = title.replace(/\s*-\s*[\w]+$/, '');
|
||||
|
||||
return title.trim().replace(/\s{2,}/g, ' ');
|
||||
}
|
||||
|
||||
function removeSeasonHint(title: string): string {
|
||||
return title
|
||||
.replace(/\bseason\s*\d+\b/gi, '')
|
||||
.replace(/\s{2,}/g, ' ')
|
||||
.trim();
|
||||
}
|
||||
|
||||
function normalizeTitle(text: string): string {
|
||||
return text.trim().toLowerCase().replace(/\s+/g, ' ');
|
||||
}
|
||||
|
||||
function extractCandidateSeasonHints(text: string): Set<number> {
|
||||
const normalized = normalizeTitle(text);
|
||||
const matches = [
|
||||
...normalized.matchAll(/\bseason\s*(\d{1,2})\b/gi),
|
||||
...normalized.matchAll(/\bs(\d{1,2})(?:\b|\D)/gi),
|
||||
];
|
||||
const values = new Set<number>();
|
||||
for (const match of matches) {
|
||||
const value = Number.parseInt(match[1]!, 10);
|
||||
if (Number.isInteger(value)) {
|
||||
values.add(value);
|
||||
}
|
||||
}
|
||||
return values;
|
||||
}
|
||||
|
||||
function isSeasonMentioned(titles: string[], season: number | null): boolean {
|
||||
if (!season) {
|
||||
return false;
|
||||
}
|
||||
const hints = titles.flatMap((title) => [...extractCandidateSeasonHints(title)]);
|
||||
return hints.includes(season);
|
||||
}
|
||||
|
||||
function pickBestSearchResult(
|
||||
title: string,
|
||||
episode: number | null,
|
||||
season: number | null,
|
||||
media: AnilistMedia[],
|
||||
): { id: number; title: string } | null {
|
||||
const cleanedTitle = removeSeasonHint(title);
|
||||
const targets = [title, cleanedTitle]
|
||||
.map(normalizeTitle)
|
||||
.map((value) => value.trim())
|
||||
.filter((value, index, all) => value.length > 0 && all.indexOf(value) === index);
|
||||
|
||||
const filtered =
|
||||
episode === null
|
||||
? media
|
||||
: media.filter((item) => {
|
||||
const total = item.episodes;
|
||||
return total === null || total >= episode;
|
||||
});
|
||||
const candidates = filtered.length > 0 ? filtered : media;
|
||||
if (candidates.length === 0) {
|
||||
return null;
|
||||
}
|
||||
|
||||
const scored = candidates.map((item) => {
|
||||
const candidateTitles = [item.title?.romaji, item.title?.english, item.title?.native]
|
||||
.filter((value): value is string => typeof value === 'string')
|
||||
.map((value) => normalizeTitle(value));
|
||||
|
||||
let score = 0;
|
||||
|
||||
for (const target of targets) {
|
||||
if (candidateTitles.includes(target)) {
|
||||
score += 120;
|
||||
continue;
|
||||
}
|
||||
if (candidateTitles.some((itemTitle) => itemTitle.includes(target))) {
|
||||
score += 30;
|
||||
}
|
||||
if (candidateTitles.some((itemTitle) => target.includes(itemTitle))) {
|
||||
score += 10;
|
||||
}
|
||||
}
|
||||
|
||||
if (episode !== null && item.episodes === episode) {
|
||||
score += 20;
|
||||
}
|
||||
|
||||
if (season !== null && isSeasonMentioned(candidateTitles, season)) {
|
||||
score += 15;
|
||||
}
|
||||
|
||||
return { item, score };
|
||||
});
|
||||
|
||||
scored.sort((a, b) => {
|
||||
if (b.score !== a.score) return b.score - a.score;
|
||||
return b.item.id - a.item.id;
|
||||
});
|
||||
|
||||
const selected = scored[0]!;
|
||||
const selectedTitle =
|
||||
selected.item.title?.english ??
|
||||
selected.item.title?.romaji ??
|
||||
selected.item.title?.native ??
|
||||
title;
|
||||
return { id: selected.item.id, title: selectedTitle };
|
||||
}
|
||||
|
||||
function buildSearchCandidates(parsed: CoverArtCandidate): string[] {
|
||||
const candidateTitles = [
|
||||
...(parsed.source === 'guessit' && parsed.season !== null && parsed.season > 1
|
||||
? [`${parsed.title} Season ${parsed.season}`]
|
||||
: []),
|
||||
parsed.title,
|
||||
];
|
||||
return candidateTitles
|
||||
.map((title) => title.trim())
|
||||
.filter((title, index, all) => title.length > 0 && all.indexOf(title) === index);
|
||||
}
|
||||
|
||||
async function searchAnilist(
|
||||
rateLimiter: AnilistRateLimiter,
|
||||
title: string,
|
||||
): Promise<{ media: AnilistMedia[]; rateLimited: boolean }> {
|
||||
await rateLimiter.acquire();
|
||||
|
||||
const res = await fetch(ANILIST_GRAPHQL_URL, {
|
||||
method: 'POST',
|
||||
headers: { 'Content-Type': 'application/json', Accept: 'application/json' },
|
||||
body: JSON.stringify({ query: SEARCH_QUERY, variables: { search: title } }),
|
||||
});
|
||||
|
||||
rateLimiter.recordResponse(res.headers);
|
||||
|
||||
if (res.status === 429) {
|
||||
return { media: [], rateLimited: true };
|
||||
}
|
||||
|
||||
if (!res.ok) {
|
||||
throw new Error(`Anilist search failed: ${res.status} ${res.statusText}`);
|
||||
}
|
||||
|
||||
const json = (await res.json()) as AnilistSearchResponse;
|
||||
const mediaList = json.data?.Page?.media;
|
||||
if (!mediaList || mediaList.length === 0) {
|
||||
return { media: [], rateLimited: false };
|
||||
}
|
||||
|
||||
return { media: mediaList, rateLimited: false };
|
||||
}
|
||||
|
||||
async function downloadImage(url: string): Promise<Buffer | null> {
|
||||
try {
|
||||
const res = await fetch(url);
|
||||
if (!res.ok) return null;
|
||||
const arrayBuf = await res.arrayBuffer();
|
||||
return Buffer.from(arrayBuf);
|
||||
} catch {
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
||||
/**
 * Builds the cover-art fetcher used by the immersion stats dashboard.
 *
 * fetchIfMissing resolves cover art for a video in stages: reuse a cached
 * blob, re-download from a cached URL, honor a recent cached "no match",
 * otherwise search Anilist (guessit-parsed title first, then fallbacks),
 * cache the winner's art, and mirror its titles/episode count onto the
 * anime row. Returns true when art is available afterwards.
 */
export function createCoverArtFetcher(
  rateLimiter: AnilistRateLimiter,
  logger: Logger,
  options: CoverArtFetcherOptions = {},
): CoverArtFetcher {
  // Prefer the canonical title stored on the video row; fall back to the
  // caller-supplied title when the row is missing or the column is blank.
  const resolveCanonicalTitle = (
    db: DatabaseSync,
    videoId: number,
    fallbackTitle: string,
  ): string => {
    const row = db
      .prepare(
        `
        SELECT canonical_title AS canonicalTitle
        FROM imm_videos
        WHERE video_id = ?
        LIMIT 1
        `,
      )
      .get(videoId) as { canonicalTitle: string | null } | undefined;
    return row?.canonicalTitle?.trim() || fallbackTitle;
  };

  // Parses the effective title into { title, season, episode, source } via
  // guessit (or the injected test stub) with an internal-parser fallback.
  const resolveMediaInfo = async (
    db: DatabaseSync,
    videoId: number,
    canonicalTitle: string,
  ): Promise<CoverArtCandidate | null> => {
    const effectiveTitle = resolveCanonicalTitle(db, videoId, canonicalTitle);
    const parsed = await guessAnilistMediaInfo(null, effectiveTitle, {
      runGuessit: options.runGuessit ?? runGuessit,
    });
    if (!parsed) {
      return null;
    }
    return {
      title: parsed.title,
      season: parsed.season,
      episode: parsed.episode,
      source: parsed.source,
    };
  };

  return {
    async fetchIfMissing(db, videoId, canonicalTitle): Promise<boolean> {
      // Stage 1: a cached blob means nothing to do.
      const existing = getCoverArt(db, videoId);
      if (existing?.coverBlob) {
        return true;
      }

      // Stage 2: a cached URL without a blob — just re-download the image.
      if (existing?.coverUrl) {
        const coverBlob = await downloadImage(existing.coverUrl);
        if (coverBlob) {
          upsertCoverArt(db, videoId, {
            anilistId: existing.anilistId,
            coverUrl: existing.coverUrl,
            coverBlob,
            titleRomaji: existing.titleRomaji,
            titleEnglish: existing.titleEnglish,
            episodesTotal: existing.episodesTotal,
          });
          return true;
        }
      }

      // Stage 3: a recent cached "no match" (no URL, no id) suppresses a
      // fresh Anilist search until NO_MATCH_RETRY_MS has elapsed.
      if (
        existing &&
        existing.coverUrl === null &&
        existing.anilistId === null &&
        Date.now() - existing.fetchedAtMs < NO_MATCH_RETRY_MS
      ) {
        return false;
      }

      // Stage 4: build search terms from the cleaned title.
      const effectiveTitle = resolveCanonicalTitle(db, videoId, canonicalTitle);
      const cleaned = stripFilenameTags(effectiveTitle);
      if (!cleaned) {
        // Nothing searchable left — cache a no-match so we don't loop.
        logger.warn('cover-art: empty title after stripping tags for videoId=%d', videoId);
        upsertCoverArt(db, videoId, {
          anilistId: null,
          coverUrl: null,
          coverBlob: null,
          titleRomaji: null,
          titleEnglish: null,
          episodesTotal: null,
        });
        return false;
      }

      const parsedInfo = await resolveMediaInfo(db, videoId, canonicalTitle);
      const searchBase = parsedInfo?.title ?? cleaned;
      const searchCandidates = parsedInfo ? buildSearchCandidates(parsedInfo) : [cleaned];

      // Always keep the tag-stripped title as a last-resort candidate.
      const effectiveCandidates = searchCandidates.includes(cleaned)
        ? searchCandidates
        : [...searchCandidates, cleaned];

      let selected: AnilistMedia | null = null;
      let rateLimited = false;

      // Stage 5: try each candidate until one produces a usable match.
      for (const candidate of effectiveCandidates) {
        logger.info('cover-art: searching Anilist for "%s" (videoId=%d)', candidate, videoId);

        try {
          const result = await searchAnilist(rateLimiter, candidate);
          rateLimited = result.rateLimited;
          if (result.media.length === 0) {
            continue;
          }

          const picked = pickBestSearchResult(
            searchBase,
            parsedInfo?.episode ?? null,
            parsedInfo?.season ?? null,
            result.media,
          );
          if (picked) {
            const match = result.media.find((media) => media.id === picked.id);
            if (match) {
              selected = match;
              break;
            }
          }
        } catch (err) {
          // A hard search error aborts without caching a no-match, so the
          // next attempt can retry immediately.
          logger.error('cover-art: Anilist search error for "%s": %s', candidate, err);
          return false;
        }
      }

      // Rate limiting also avoids caching a no-match: retry later.
      if (rateLimited) {
        logger.warn('cover-art: rate-limited by Anilist, skipping videoId=%d', videoId);
        return false;
      }

      if (!selected) {
        // Genuine miss — cache it so we back off for NO_MATCH_RETRY_MS.
        logger.info('cover-art: no Anilist results for "%s", caching no-match', searchBase);
        upsertCoverArt(db, videoId, {
          anilistId: null,
          coverUrl: null,
          coverBlob: null,
          titleRomaji: null,
          titleEnglish: null,
          episodesTotal: null,
        });
        return false;
      }

      // Stage 6: persist the match. A failed image download still caches the
      // URL and metadata (the blob can be fetched on a later call, stage 2).
      const coverUrl = selected.coverImage?.large ?? selected.coverImage?.medium ?? null;
      let coverBlob: Buffer | null = null;
      if (coverUrl) {
        coverBlob = await downloadImage(coverUrl);
      }

      upsertCoverArt(db, videoId, {
        anilistId: selected.id,
        coverUrl,
        coverBlob,
        titleRomaji: selected.title?.romaji ?? null,
        titleEnglish: selected.title?.english ?? null,
        episodesTotal: selected.episodes ?? null,
      });

      // Mirror the resolved Anilist identity onto the anime record as well.
      updateAnimeAnilistInfo(db, videoId, {
        anilistId: selected.id,
        titleRomaji: selected.title?.romaji ?? null,
        titleEnglish: selected.title?.english ?? null,
        titleNative: selected.title?.native ?? null,
        episodesTotal: selected.episodes ?? null,
      });

      logger.info(
        'cover-art: cached art for videoId=%d anilistId=%d title="%s"',
        videoId,
        selected.id,
        selected.title?.romaji ?? searchBase,
      );

      return true;
    },
  };
}
|
||||
72
src/core/services/anilist/rate-limiter.ts
Normal file
72
src/core/services/anilist/rate-limiter.ts
Normal file
@@ -0,0 +1,72 @@
|
||||
const DEFAULT_MAX_PER_MINUTE = 20;
|
||||
const WINDOW_MS = 60_000;
|
||||
const SAFETY_REMAINING_THRESHOLD = 5;
|
||||
|
||||
export interface AnilistRateLimiter {
|
||||
acquire(): Promise<void>;
|
||||
recordResponse(headers: Headers): void;
|
||||
}
|
||||
|
||||
export function createAnilistRateLimiter(
|
||||
maxPerMinute = DEFAULT_MAX_PER_MINUTE,
|
||||
): AnilistRateLimiter {
|
||||
const timestamps: number[] = [];
|
||||
let pauseUntilMs = 0;
|
||||
|
||||
function pruneOld(now: number): void {
|
||||
const cutoff = now - WINDOW_MS;
|
||||
while (timestamps.length > 0 && timestamps[0]! < cutoff) {
|
||||
timestamps.shift();
|
||||
}
|
||||
}
|
||||
|
||||
return {
|
||||
async acquire(): Promise<void> {
|
||||
const now = Date.now();
|
||||
|
||||
if (now < pauseUntilMs) {
|
||||
const waitMs = pauseUntilMs - now;
|
||||
await new Promise((resolve) => setTimeout(resolve, waitMs));
|
||||
}
|
||||
|
||||
pruneOld(Date.now());
|
||||
|
||||
if (timestamps.length >= maxPerMinute) {
|
||||
const oldest = timestamps[0]!;
|
||||
const waitMs = oldest + WINDOW_MS - Date.now() + 100;
|
||||
if (waitMs > 0) {
|
||||
await new Promise((resolve) => setTimeout(resolve, waitMs));
|
||||
}
|
||||
pruneOld(Date.now());
|
||||
}
|
||||
|
||||
timestamps.push(Date.now());
|
||||
},
|
||||
|
||||
recordResponse(headers: Headers): void {
|
||||
const remaining = headers.get('x-ratelimit-remaining');
|
||||
if (remaining !== null) {
|
||||
const n = parseInt(remaining, 10);
|
||||
if (Number.isFinite(n) && n < SAFETY_REMAINING_THRESHOLD) {
|
||||
const reset = headers.get('x-ratelimit-reset');
|
||||
if (reset) {
|
||||
const resetMs = parseInt(reset, 10) * 1000;
|
||||
if (Number.isFinite(resetMs)) {
|
||||
pauseUntilMs = Math.max(pauseUntilMs, resetMs);
|
||||
}
|
||||
} else {
|
||||
pauseUntilMs = Math.max(pauseUntilMs, Date.now() + WINDOW_MS);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
const retryAfter = headers.get('retry-after');
|
||||
if (retryAfter) {
|
||||
const seconds = parseInt(retryAfter, 10);
|
||||
if (Number.isFinite(seconds) && seconds > 0) {
|
||||
pauseUntilMs = Math.max(pauseUntilMs, Date.now() + seconds * 1000);
|
||||
}
|
||||
}
|
||||
},
|
||||
};
|
||||
}
|
||||
@@ -34,6 +34,7 @@ function makeArgs(overrides: Partial<CliArgs> = {}): CliArgs {
|
||||
anilistSetup: false,
|
||||
anilistRetryQueue: false,
|
||||
dictionary: false,
|
||||
stats: false,
|
||||
jellyfin: false,
|
||||
jellyfinLogin: false,
|
||||
jellyfinLogout: false,
|
||||
|
||||
@@ -176,6 +176,22 @@ test('runAppReadyRuntime skips heavy startup when shouldSkipHeavyStartup returns
|
||||
assert.ok(calls.indexOf('handleFirstRunSetup') < calls.indexOf('handleInitialArgs'));
|
||||
});
|
||||
|
||||
// Texthooker-only mode must boot through the minimal startup sequence only:
// config bootstrap, config reload, then CLI arg handling — nothing heavier.
test('runAppReadyRuntime uses minimal startup for texthooker-only mode', async () => {
  const { deps, calls } = makeDeps({
    texthookerOnlyMode: true,
    reloadConfig: () => calls.push('reloadConfig'),
    handleInitialArgs: () => calls.push('handleInitialArgs'),
  });

  await runAppReadyRuntime(deps);

  // deepEqual pins both the exact set of steps and their order.
  assert.deepEqual(calls, [
    'ensureDefaultConfigBootstrap',
    'reloadConfig',
    'handleInitialArgs',
  ]);
});
|
||||
|
||||
test('runAppReadyRuntime skips Jellyfin remote startup when dependency is not wired', async () => {
|
||||
const { deps, calls } = makeDeps({
|
||||
startJellyfinRemoteSession: undefined,
|
||||
|
||||
@@ -34,6 +34,7 @@ function makeArgs(overrides: Partial<CliArgs> = {}): CliArgs {
|
||||
anilistSetup: false,
|
||||
anilistRetryQueue: false,
|
||||
dictionary: false,
|
||||
stats: false,
|
||||
jellyfin: false,
|
||||
jellyfinLogin: false,
|
||||
jellyfinLogout: false,
|
||||
@@ -177,6 +178,9 @@ function createDeps(overrides: Partial<CliCommandServiceDeps> = {}) {
|
||||
mediaTitle: 'Test',
|
||||
entryCount: 10,
|
||||
}),
|
||||
runStatsCommand: async () => {
|
||||
calls.push('runStatsCommand');
|
||||
},
|
||||
runJellyfinCommand: async () => {
|
||||
calls.push('runJellyfinCommand');
|
||||
},
|
||||
@@ -249,6 +253,21 @@ test('handleCliCommand opens first-run setup window for --setup', () => {
|
||||
assert.equal(calls.includes('openYomitanSettingsDelayed:1000'), false);
|
||||
});
|
||||
|
||||
// --stats must dispatch to the stats command without starting the overlay
// runtime or connecting to mpv.
test('handleCliCommand dispatches stats command without overlay startup', async () => {
  const { deps, calls } = createDeps({
    runStatsCommand: async () => {
      calls.push('runStatsCommand');
    },
  });

  handleCliCommand(makeArgs({ stats: true }), 'initial', deps);
  // Let the fire-and-forget stats promise settle before asserting.
  await Promise.resolve();

  assert.ok(calls.includes('runStatsCommand'));
  assert.equal(calls.includes('initializeOverlayRuntime'), false);
  assert.equal(calls.includes('connectMpvClient'), false);
});
|
||||
|
||||
test('handleCliCommand applies cli log level for second-instance commands', () => {
|
||||
const { deps, calls } = createDeps({
|
||||
setLogLevel: (level) => {
|
||||
@@ -520,8 +539,21 @@ test('handleCliCommand runs refresh-known-words command', () => {
|
||||
assert.ok(calls.includes('refreshKnownWords'));
|
||||
});
|
||||
|
||||
// When --refresh-known-words is the initial command and no main window exists
// (headless run), the app should stop itself after the refresh completes.
test('handleCliCommand stops app after headless initial refresh-known-words completes', async () => {
  const { deps, calls } = createDeps({
    hasMainWindow: () => false,
  });

  handleCliCommand(makeArgs({ refreshKnownWords: true }), 'initial', deps);
  // setImmediate lets the async refresh chain (including its cleanup) run.
  await new Promise((resolve) => setImmediate(resolve));

  assert.ok(calls.includes('refreshKnownWords'));
  assert.ok(calls.includes('stopApp'));
});
|
||||
|
||||
test('handleCliCommand reports async refresh-known-words errors to OSD', async () => {
|
||||
const { deps, calls, osd } = createDeps({
|
||||
hasMainWindow: () => false,
|
||||
refreshKnownWords: async () => {
|
||||
throw new Error('refresh boom');
|
||||
},
|
||||
@@ -532,4 +564,5 @@ test('handleCliCommand reports async refresh-known-words errors to OSD', async (
|
||||
|
||||
assert.ok(calls.some((value) => value.startsWith('error:refreshKnownWords failed:')));
|
||||
assert.ok(osd.some((value) => value.includes('Refresh known words failed: refresh boom')));
|
||||
assert.ok(calls.includes('stopApp'));
|
||||
});
|
||||
|
||||
@@ -61,6 +61,7 @@ export interface CliCommandServiceDeps {
|
||||
mediaTitle: string;
|
||||
entryCount: number;
|
||||
}>;
|
||||
runStatsCommand: (args: CliArgs, source: CliCommandSource) => Promise<void>;
|
||||
runJellyfinCommand: (args: CliArgs) => Promise<void>;
|
||||
printHelp: () => void;
|
||||
hasMainWindow: () => boolean;
|
||||
@@ -154,6 +155,7 @@ export interface CliCommandDepsRuntimeOptions {
|
||||
};
|
||||
jellyfin: {
|
||||
openSetup: () => void;
|
||||
runStatsCommand: (args: CliArgs, source: CliCommandSource) => Promise<void>;
|
||||
runCommand: (args: CliArgs) => Promise<void>;
|
||||
};
|
||||
ui: UiCliRuntime;
|
||||
@@ -222,6 +224,7 @@ export function createCliCommandDepsRuntime(
|
||||
getAnilistQueueStatus: options.anilist.getQueueStatus,
|
||||
retryAnilistQueue: options.anilist.retryQueueNow,
|
||||
generateCharacterDictionary: options.dictionary.generate,
|
||||
runStatsCommand: options.jellyfin.runStatsCommand,
|
||||
runJellyfinCommand: options.jellyfin.runCommand,
|
||||
printHelp: options.ui.printHelp,
|
||||
hasMainWindow: options.app.hasMainWindow,
|
||||
@@ -331,12 +334,18 @@ export function handleCliCommand(
|
||||
'Update failed',
|
||||
);
|
||||
} else if (args.refreshKnownWords) {
|
||||
runAsyncWithOsd(
|
||||
() => deps.refreshKnownWords(),
|
||||
deps,
|
||||
'refreshKnownWords',
|
||||
'Refresh known words failed',
|
||||
);
|
||||
const shouldStopAfterRun = source === 'initial' && !deps.hasMainWindow();
|
||||
deps
|
||||
.refreshKnownWords()
|
||||
.catch((err) => {
|
||||
deps.error('refreshKnownWords failed:', err);
|
||||
deps.showMpvOsd(`Refresh known words failed: ${(err as Error).message}`);
|
||||
})
|
||||
.finally(() => {
|
||||
if (shouldStopAfterRun) {
|
||||
deps.stopApp();
|
||||
}
|
||||
});
|
||||
} else if (args.toggleSecondarySub) {
|
||||
deps.cycleSecondarySubMode();
|
||||
} else if (args.triggerFieldGrouping) {
|
||||
@@ -410,6 +419,8 @@ export function handleCliCommand(
|
||||
deps.stopApp();
|
||||
}
|
||||
});
|
||||
} else if (args.stats) {
|
||||
void deps.runStatsCommand(args, source);
|
||||
} else if (args.anilistRetryQueue) {
|
||||
const queueStatus = deps.getAnilistQueueStatus();
|
||||
deps.log(
|
||||
|
||||
@@ -130,6 +130,56 @@ test('createFrequencyDictionaryLookup parses composite displayValue by primary r
|
||||
assert.equal(lookup('高み'), 9933);
|
||||
});
|
||||
|
||||
// displayValue strings take only their leading digit run: "118,121" parses
// as 118 and "1,234" as 1 (comma is a separator here, not digit grouping).
test('createFrequencyDictionaryLookup uses leading display digits for displayValue strings', async () => {
  const tempDir = fs.mkdtempSync(path.join(os.tmpdir(), 'subminer-frequency-dict-'));
  const bankPath = path.join(tempDir, 'term_meta_bank_1.json');
  fs.writeFileSync(
    bankPath,
    JSON.stringify([
      ['潜む', 1, { frequency: { value: 121, displayValue: '118,121' } }],
      ['例', 2, { frequency: { value: 1234, displayValue: '1,234' } }],
    ]),
  );

  const lookup = await createFrequencyDictionaryLookup({
    searchPaths: [tempDir],
    log: () => undefined,
  });

  // The displayValue prefix wins over the raw numeric value in both cases.
  assert.equal(lookup('潜む'), 118);
  assert.equal(lookup('例'), 1);
});
|
||||
|
||||
// Dictionaries whose index.json declares frequencyMode "occurrence-based"
// must be skipped entirely (their numbers are not ranks), and the skip must
// be logged together with the dictionary title.
test('createFrequencyDictionaryLookup ignores occurrence-based Yomitan dictionaries', async () => {
  const logs: string[] = [];
  const tempDir = fs.mkdtempSync(path.join(os.tmpdir(), 'subminer-frequency-dict-'));
  fs.writeFileSync(
    path.join(tempDir, 'index.json'),
    JSON.stringify({
      title: 'CC100',
      revision: '1',
      frequencyMode: 'occurrence-based',
    }),
  );
  fs.writeFileSync(
    path.join(tempDir, 'term_meta_bank_1.json'),
    JSON.stringify([['潜む', 1, { frequency: { value: 118121 } }]]),
  );

  const lookup = await createFrequencyDictionaryLookup({
    searchPaths: [tempDir],
    log: (message) => {
      logs.push(message);
    },
  });

  // Terms from the skipped dictionary resolve to null…
  assert.equal(lookup('潜む'), null);
  // …and the log mentions both the reason and the dictionary title.
  assert.equal(
    logs.some((entry) => entry.includes('occurrence-based') && entry.includes('CC100')),
    true,
  );
});
|
||||
|
||||
test('createFrequencyDictionaryLookup does not require synchronous fs APIs', async () => {
|
||||
const tempDir = fs.mkdtempSync(path.join(os.tmpdir(), 'subminer-frequency-dict-'));
|
||||
const bankPath = path.join(tempDir, 'term_meta_bank_1.json');
|
||||
|
||||
@@ -6,6 +6,8 @@ export interface FrequencyDictionaryLookupOptions {
|
||||
log: (message: string) => void;
|
||||
}
|
||||
|
||||
type FrequencyDictionaryMode = 'occurrence-based' | 'rank-based';
|
||||
|
||||
interface FrequencyDictionaryEntry {
|
||||
rank: number;
|
||||
term: string;
|
||||
@@ -29,30 +31,67 @@ function normalizeFrequencyTerm(value: string): string {
|
||||
return value.trim().toLowerCase();
|
||||
}
|
||||
|
||||
/**
 * Reads a Yomitan dictionary's index.json and extracts its title and
 * frequencyMode.
 *
 * Never throws: a plain ENOENT (no index.json — normal for bare term banks)
 * is silent, every other read/parse failure is logged, and all failure
 * modes degrade to `{ title: null, frequencyMode: null }`.
 */
async function readDictionaryMetadata(
  dictionaryPath: string,
  log: (message: string) => void,
): Promise<{ title: string | null; frequencyMode: FrequencyDictionaryMode | null }> {
  const indexPath = path.join(dictionaryPath, 'index.json');
  let rawText: string;
  try {
    rawText = await fs.readFile(indexPath, 'utf-8');
  } catch (error) {
    // Missing file is expected; anything else is worth surfacing in the log.
    if (isErrorCode(error, 'ENOENT')) {
      return { title: null, frequencyMode: null };
    }
    log(`Failed to read frequency dictionary index ${indexPath}: ${String(error)}`);
    return { title: null, frequencyMode: null };
  }

  let rawIndex: unknown;
  try {
    rawIndex = JSON.parse(rawText) as unknown;
  } catch {
    log(`Failed to parse frequency dictionary index as JSON: ${indexPath}`);
    return { title: null, frequencyMode: null };
  }

  if (!rawIndex || typeof rawIndex !== 'object') {
    return { title: null, frequencyMode: null };
  }

  // Only accept well-formed values; unexpected shapes degrade to null.
  const titleRaw = (rawIndex as { title?: unknown }).title;
  const frequencyModeRaw = (rawIndex as { frequencyMode?: unknown }).frequencyMode;
  return {
    title: typeof titleRaw === 'string' && titleRaw.trim().length > 0 ? titleRaw.trim() : null,
    frequencyMode:
      frequencyModeRaw === 'occurrence-based' || frequencyModeRaw === 'rank-based'
        ? frequencyModeRaw
        : null,
  };
}
|
||||
|
||||
function parsePositiveFrequencyString(value: string): number | null {
|
||||
const trimmed = value.trim();
|
||||
if (!trimmed) {
|
||||
return null;
|
||||
}
|
||||
|
||||
const numericPrefix = trimmed.match(/^\d[\d,]*/)?.[0];
|
||||
if (!numericPrefix) {
|
||||
const numericMatch = trimmed.match(/[+-]?(\d+(\.\d*)?|\.\d+)([eE][+-]?\d+)?/)?.[0];
|
||||
if (!numericMatch) {
|
||||
return null;
|
||||
}
|
||||
|
||||
const chunks = numericPrefix.split(',');
|
||||
const normalizedNumber =
|
||||
chunks.length <= 1
|
||||
? (chunks[0] ?? '')
|
||||
: chunks.slice(1).every((chunk) => /^\d{3}$/.test(chunk))
|
||||
? chunks.join('')
|
||||
: (chunks[0] ?? '');
|
||||
const parsed = Number.parseInt(normalizedNumber, 10);
|
||||
const parsed = Number.parseFloat(numericMatch);
|
||||
if (!Number.isFinite(parsed) || parsed <= 0) {
|
||||
return null;
|
||||
}
|
||||
|
||||
return parsed;
|
||||
const normalized = Math.floor(parsed);
|
||||
if (!Number.isFinite(normalized) || normalized <= 0) {
|
||||
return null;
|
||||
}
|
||||
|
||||
return normalized;
|
||||
}
|
||||
|
||||
function parsePositiveFrequencyNumber(value: unknown): number | null {
|
||||
@@ -68,18 +107,32 @@ function parsePositiveFrequencyNumber(value: unknown): number | null {
|
||||
return null;
|
||||
}
|
||||
|
||||
function parseDisplayFrequencyNumber(value: unknown): number | null {
|
||||
if (typeof value === 'string') {
|
||||
const leadingDigits = value.trim().match(/^\d+/)?.[0];
|
||||
if (!leadingDigits) {
|
||||
return null;
|
||||
}
|
||||
const parsed = Number.parseInt(leadingDigits, 10);
|
||||
return Number.isFinite(parsed) && parsed > 0 ? parsed : null;
|
||||
}
|
||||
|
||||
return parsePositiveFrequencyNumber(value);
|
||||
}
|
||||
|
||||
function extractFrequencyDisplayValue(meta: unknown): number | null {
|
||||
if (!meta || typeof meta !== 'object') return null;
|
||||
const frequency = (meta as { frequency?: unknown }).frequency;
|
||||
if (!frequency || typeof frequency !== 'object') return null;
|
||||
const rawValue = (frequency as { value?: unknown }).value;
|
||||
const parsedRawValue = parsePositiveFrequencyNumber(rawValue);
|
||||
const displayValue = (frequency as { displayValue?: unknown }).displayValue;
|
||||
const parsedDisplayValue = parsePositiveFrequencyNumber(displayValue);
|
||||
const parsedDisplayValue = parseDisplayFrequencyNumber(displayValue);
|
||||
if (parsedDisplayValue !== null) {
|
||||
return parsedDisplayValue;
|
||||
}
|
||||
|
||||
const rawValue = (frequency as { value?: unknown }).value;
|
||||
return parsePositiveFrequencyNumber(rawValue);
|
||||
return parsedRawValue;
|
||||
}
|
||||
|
||||
function asFrequencyDictionaryEntry(entry: unknown): FrequencyDictionaryEntry | null {
|
||||
@@ -141,6 +194,15 @@ async function collectDictionaryFromPath(
|
||||
log: (message: string) => void,
|
||||
): Promise<Map<string, number>> {
|
||||
const terms = new Map<string, number>();
|
||||
const metadata = await readDictionaryMetadata(dictionaryPath, log);
|
||||
if (metadata.frequencyMode === 'occurrence-based') {
|
||||
log(
|
||||
`Skipping occurrence-based frequency dictionary ${
|
||||
metadata.title ?? dictionaryPath
|
||||
}; SubMiner frequency tags require rank-based values.`,
|
||||
);
|
||||
return terms;
|
||||
}
|
||||
|
||||
let fileNames: string[];
|
||||
try {
|
||||
|
||||
File diff suppressed because it is too large
Load Diff
File diff suppressed because it is too large
Load Diff
2681
src/core/services/immersion-tracker/__tests__/query.test.ts
Normal file
2681
src/core/services/immersion-tracker/__tests__/query.test.ts
Normal file
File diff suppressed because it is too large
Load Diff
71
src/core/services/immersion-tracker/legacy-vocabulary-pos.ts
Normal file
71
src/core/services/immersion-tracker/legacy-vocabulary-pos.ts
Normal file
@@ -0,0 +1,71 @@
|
||||
import type { Token } from '../../../types';
|
||||
import type { LegacyVocabularyPosResolution } from './types';
|
||||
import { deriveStoredPartOfSpeech } from '../tokenizer/part-of-speech';
|
||||
|
||||
const KATAKANA_TO_HIRAGANA_OFFSET = 0x60;
|
||||
const KATAKANA_CODEPOINT_START = 0x30a1;
|
||||
const KATAKANA_CODEPOINT_END = 0x30f6;
|
||||
|
||||
function normalizeLookupText(value: string | null | undefined): string {
|
||||
return typeof value === 'string' ? value.trim() : '';
|
||||
}
|
||||
|
||||
function katakanaToHiragana(text: string): string {
|
||||
let normalized = '';
|
||||
for (const char of text) {
|
||||
const code = char.codePointAt(0);
|
||||
if (code === undefined) {
|
||||
continue;
|
||||
}
|
||||
if (code >= KATAKANA_CODEPOINT_START && code <= KATAKANA_CODEPOINT_END) {
|
||||
normalized += String.fromCodePoint(code - KATAKANA_TO_HIRAGANA_OFFSET);
|
||||
continue;
|
||||
}
|
||||
normalized += char;
|
||||
}
|
||||
return normalized;
|
||||
}
|
||||
|
||||
function toResolution(token: Token): LegacyVocabularyPosResolution {
|
||||
return {
|
||||
headword: normalizeLookupText(token.headword) || normalizeLookupText(token.word),
|
||||
reading: katakanaToHiragana(normalizeLookupText(token.katakanaReading)),
|
||||
partOfSpeech: deriveStoredPartOfSpeech({
|
||||
partOfSpeech: token.partOfSpeech,
|
||||
pos1: token.pos1,
|
||||
}),
|
||||
pos1: normalizeLookupText(token.pos1),
|
||||
pos2: normalizeLookupText(token.pos2),
|
||||
pos3: normalizeLookupText(token.pos3),
|
||||
};
|
||||
}
|
||||
|
||||
export function resolveLegacyVocabularyPosFromTokens(
|
||||
lookupText: string,
|
||||
tokens: Token[] | null,
|
||||
): LegacyVocabularyPosResolution | null {
|
||||
const normalizedLookup = normalizeLookupText(lookupText);
|
||||
if (!normalizedLookup || !tokens || tokens.length === 0) {
|
||||
return null;
|
||||
}
|
||||
|
||||
const exactSurfaceMatches = tokens.filter(
|
||||
(token) => normalizeLookupText(token.word) === normalizedLookup,
|
||||
);
|
||||
if (exactSurfaceMatches.length === 1) {
|
||||
return toResolution(exactSurfaceMatches[0]!);
|
||||
}
|
||||
|
||||
const exactHeadwordMatches = tokens.filter(
|
||||
(token) => normalizeLookupText(token.headword) === normalizedLookup,
|
||||
);
|
||||
if (exactHeadwordMatches.length === 1) {
|
||||
return toResolution(exactHeadwordMatches[0]!);
|
||||
}
|
||||
|
||||
if (tokens.length === 1) {
|
||||
return toResolution(tokens[0]!);
|
||||
}
|
||||
|
||||
return null;
|
||||
}
|
||||
569
src/core/services/immersion-tracker/lifetime.ts
Normal file
569
src/core/services/immersion-tracker/lifetime.ts
Normal file
@@ -0,0 +1,569 @@
|
||||
import type { DatabaseSync } from './sqlite';
|
||||
import { finalizeSessionRecord } from './session';
|
||||
import type { LifetimeRebuildSummary, SessionState } from './types';
|
||||
|
||||
interface TelemetryRow {
|
||||
active_watched_ms: number | null;
|
||||
cards_mined: number | null;
|
||||
lines_seen: number | null;
|
||||
tokens_seen: number | null;
|
||||
}
|
||||
|
||||
interface VideoRow {
|
||||
anime_id: number | null;
|
||||
watched: number;
|
||||
}
|
||||
|
||||
interface AnimeRow {
|
||||
episodes_total: number | null;
|
||||
}
|
||||
|
||||
function asPositiveNumber(value: number | null, fallback: number): number {
|
||||
if (value === null || !Number.isFinite(value)) {
|
||||
return fallback;
|
||||
}
|
||||
return Math.max(0, Math.floor(value));
|
||||
}
|
||||
|
||||
interface ExistenceRow {
|
||||
count: number;
|
||||
}
|
||||
|
||||
interface LifetimeMediaStateRow {
|
||||
completed: number;
|
||||
}
|
||||
|
||||
interface LifetimeAnimeStateRow {
|
||||
episodes_completed: number;
|
||||
}
|
||||
|
||||
interface RetainedSessionRow {
|
||||
sessionId: number;
|
||||
videoId: number;
|
||||
startedAtMs: number;
|
||||
endedAtMs: number;
|
||||
lastMediaMs: number | null;
|
||||
totalWatchedMs: number;
|
||||
activeWatchedMs: number;
|
||||
linesSeen: number;
|
||||
tokensSeen: number;
|
||||
cardsMined: number;
|
||||
lookupCount: number;
|
||||
lookupHits: number;
|
||||
yomitanLookupCount: number;
|
||||
pauseCount: number;
|
||||
pauseMs: number;
|
||||
seekForwardCount: number;
|
||||
seekBackwardCount: number;
|
||||
mediaBufferEvents: number;
|
||||
}
|
||||
|
||||
function hasRetainedPriorSession(
|
||||
db: DatabaseSync,
|
||||
videoId: number,
|
||||
startedAtMs: number,
|
||||
currentSessionId: number,
|
||||
): boolean {
|
||||
return (
|
||||
Number(
|
||||
(
|
||||
db
|
||||
.prepare(
|
||||
`
|
||||
SELECT COUNT(*) AS count
|
||||
FROM imm_sessions
|
||||
WHERE video_id = ?
|
||||
AND (
|
||||
started_at_ms < ?
|
||||
OR (started_at_ms = ? AND session_id < ?)
|
||||
)
|
||||
`,
|
||||
)
|
||||
.get(videoId, startedAtMs, startedAtMs, currentSessionId) as ExistenceRow | null
|
||||
)?.count ?? 0,
|
||||
) > 0
|
||||
);
|
||||
}
|
||||
|
||||
function isFirstSessionForLocalDay(
|
||||
db: DatabaseSync,
|
||||
currentSessionId: number,
|
||||
startedAtMs: number,
|
||||
): boolean {
|
||||
return (
|
||||
(
|
||||
db
|
||||
.prepare(
|
||||
`
|
||||
SELECT COUNT(*) AS count
|
||||
FROM imm_sessions
|
||||
WHERE CAST(strftime('%s', started_at_ms / 1000, 'unixepoch', 'localtime') AS INTEGER) / 86400
|
||||
= CAST(strftime('%s', ? / 1000, 'unixepoch', 'localtime') AS INTEGER) / 86400
|
||||
AND (
|
||||
started_at_ms < ?
|
||||
OR (started_at_ms = ? AND session_id < ?)
|
||||
)
|
||||
`,
|
||||
)
|
||||
.get(startedAtMs, startedAtMs, startedAtMs, currentSessionId) as ExistenceRow | null
|
||||
)?.count === 0
|
||||
);
|
||||
}
|
||||
|
||||
function resetLifetimeSummaries(db: DatabaseSync, nowMs: number): void {
|
||||
db.exec(`
|
||||
DELETE FROM imm_lifetime_anime;
|
||||
DELETE FROM imm_lifetime_media;
|
||||
DELETE FROM imm_lifetime_applied_sessions;
|
||||
`);
|
||||
db.prepare(
|
||||
`
|
||||
UPDATE imm_lifetime_global
|
||||
SET
|
||||
total_sessions = 0,
|
||||
total_active_ms = 0,
|
||||
total_cards = 0,
|
||||
active_days = 0,
|
||||
episodes_started = 0,
|
||||
episodes_completed = 0,
|
||||
anime_completed = 0,
|
||||
last_rebuilt_ms = ?,
|
||||
LAST_UPDATE_DATE = ?
|
||||
WHERE global_id = 1
|
||||
`,
|
||||
).run(nowMs, nowMs);
|
||||
}
|
||||
|
||||
function toRebuildSessionState(row: RetainedSessionRow): SessionState {
|
||||
return {
|
||||
sessionId: row.sessionId,
|
||||
videoId: row.videoId,
|
||||
startedAtMs: row.startedAtMs,
|
||||
currentLineIndex: 0,
|
||||
lastWallClockMs: row.endedAtMs,
|
||||
lastMediaMs: row.lastMediaMs,
|
||||
lastPauseStartMs: null,
|
||||
isPaused: false,
|
||||
pendingTelemetry: false,
|
||||
markedWatched: false,
|
||||
totalWatchedMs: Math.max(0, row.totalWatchedMs),
|
||||
activeWatchedMs: Math.max(0, row.activeWatchedMs),
|
||||
linesSeen: Math.max(0, row.linesSeen),
|
||||
tokensSeen: Math.max(0, row.tokensSeen),
|
||||
cardsMined: Math.max(0, row.cardsMined),
|
||||
lookupCount: Math.max(0, row.lookupCount),
|
||||
lookupHits: Math.max(0, row.lookupHits),
|
||||
yomitanLookupCount: Math.max(0, row.yomitanLookupCount),
|
||||
pauseCount: Math.max(0, row.pauseCount),
|
||||
pauseMs: Math.max(0, row.pauseMs),
|
||||
seekForwardCount: Math.max(0, row.seekForwardCount),
|
||||
seekBackwardCount: Math.max(0, row.seekBackwardCount),
|
||||
mediaBufferEvents: Math.max(0, row.mediaBufferEvents),
|
||||
};
|
||||
}
|
||||
|
||||
function getRetainedStaleActiveSessions(db: DatabaseSync): RetainedSessionRow[] {
|
||||
return db
|
||||
.prepare(
|
||||
`
|
||||
SELECT
|
||||
s.session_id AS sessionId,
|
||||
s.video_id AS videoId,
|
||||
s.started_at_ms AS startedAtMs,
|
||||
COALESCE(t.sample_ms, s.LAST_UPDATE_DATE, s.started_at_ms) AS endedAtMs,
|
||||
s.ended_media_ms AS lastMediaMs,
|
||||
COALESCE(t.total_watched_ms, s.total_watched_ms, 0) AS totalWatchedMs,
|
||||
COALESCE(t.active_watched_ms, s.active_watched_ms, 0) AS activeWatchedMs,
|
||||
COALESCE(t.lines_seen, s.lines_seen, 0) AS linesSeen,
|
||||
COALESCE(t.tokens_seen, s.tokens_seen, 0) AS tokensSeen,
|
||||
COALESCE(t.cards_mined, s.cards_mined, 0) AS cardsMined,
|
||||
COALESCE(t.lookup_count, s.lookup_count, 0) AS lookupCount,
|
||||
COALESCE(t.lookup_hits, s.lookup_hits, 0) AS lookupHits,
|
||||
COALESCE(t.yomitan_lookup_count, s.yomitan_lookup_count, 0) AS yomitanLookupCount,
|
||||
COALESCE(t.pause_count, s.pause_count, 0) AS pauseCount,
|
||||
COALESCE(t.pause_ms, s.pause_ms, 0) AS pauseMs,
|
||||
COALESCE(t.seek_forward_count, s.seek_forward_count, 0) AS seekForwardCount,
|
||||
COALESCE(t.seek_backward_count, s.seek_backward_count, 0) AS seekBackwardCount,
|
||||
COALESCE(t.media_buffer_events, s.media_buffer_events, 0) AS mediaBufferEvents
|
||||
FROM imm_sessions s
|
||||
LEFT JOIN imm_session_telemetry t
|
||||
ON t.telemetry_id = (
|
||||
SELECT telemetry_id
|
||||
FROM imm_session_telemetry
|
||||
WHERE session_id = s.session_id
|
||||
ORDER BY sample_ms DESC, telemetry_id DESC
|
||||
LIMIT 1
|
||||
)
|
||||
WHERE s.ended_at_ms IS NULL
|
||||
ORDER BY s.started_at_ms ASC, s.session_id ASC
|
||||
`,
|
||||
)
|
||||
.all() as RetainedSessionRow[];
|
||||
}
|
||||
|
||||
function upsertLifetimeMedia(
|
||||
db: DatabaseSync,
|
||||
videoId: number,
|
||||
nowMs: number,
|
||||
activeMs: number,
|
||||
cardsMined: number,
|
||||
linesSeen: number,
|
||||
tokensSeen: number,
|
||||
completed: number,
|
||||
startedAtMs: number,
|
||||
endedAtMs: number,
|
||||
): void {
|
||||
db.prepare(
|
||||
`
|
||||
INSERT INTO imm_lifetime_media(
|
||||
video_id,
|
||||
total_sessions,
|
||||
total_active_ms,
|
||||
total_cards,
|
||||
total_lines_seen,
|
||||
total_tokens_seen,
|
||||
completed,
|
||||
first_watched_ms,
|
||||
last_watched_ms,
|
||||
CREATED_DATE,
|
||||
LAST_UPDATE_DATE
|
||||
)
|
||||
VALUES (?, 1, ?, ?, ?, ?, ?, ?, ?, ?, ?)
|
||||
ON CONFLICT(video_id) DO UPDATE SET
|
||||
total_sessions = total_sessions + 1,
|
||||
total_active_ms = total_active_ms + excluded.total_active_ms,
|
||||
total_cards = total_cards + excluded.total_cards,
|
||||
total_lines_seen = total_lines_seen + excluded.total_lines_seen,
|
||||
total_tokens_seen = total_tokens_seen + excluded.total_tokens_seen,
|
||||
completed = MAX(completed, excluded.completed),
|
||||
first_watched_ms = CASE
|
||||
WHEN excluded.first_watched_ms IS NULL THEN first_watched_ms
|
||||
WHEN first_watched_ms IS NULL THEN excluded.first_watched_ms
|
||||
WHEN excluded.first_watched_ms < first_watched_ms THEN excluded.first_watched_ms
|
||||
ELSE first_watched_ms
|
||||
END,
|
||||
last_watched_ms = CASE
|
||||
WHEN excluded.last_watched_ms IS NULL THEN last_watched_ms
|
||||
WHEN last_watched_ms IS NULL THEN excluded.last_watched_ms
|
||||
WHEN excluded.last_watched_ms > last_watched_ms THEN excluded.last_watched_ms
|
||||
ELSE last_watched_ms
|
||||
END,
|
||||
LAST_UPDATE_DATE = excluded.LAST_UPDATE_DATE
|
||||
`,
|
||||
).run(
|
||||
videoId,
|
||||
activeMs,
|
||||
cardsMined,
|
||||
linesSeen,
|
||||
tokensSeen,
|
||||
completed,
|
||||
startedAtMs,
|
||||
endedAtMs,
|
||||
nowMs,
|
||||
nowMs,
|
||||
);
|
||||
}
|
||||
|
||||
function upsertLifetimeAnime(
|
||||
db: DatabaseSync,
|
||||
animeId: number,
|
||||
nowMs: number,
|
||||
activeMs: number,
|
||||
cardsMined: number,
|
||||
linesSeen: number,
|
||||
tokensSeen: number,
|
||||
episodesStartedDelta: number,
|
||||
episodesCompletedDelta: number,
|
||||
startedAtMs: number,
|
||||
endedAtMs: number,
|
||||
): void {
|
||||
db.prepare(
|
||||
`
|
||||
INSERT INTO imm_lifetime_anime(
|
||||
anime_id,
|
||||
total_sessions,
|
||||
total_active_ms,
|
||||
total_cards,
|
||||
total_lines_seen,
|
||||
total_tokens_seen,
|
||||
episodes_started,
|
||||
episodes_completed,
|
||||
first_watched_ms,
|
||||
last_watched_ms,
|
||||
CREATED_DATE,
|
||||
LAST_UPDATE_DATE
|
||||
)
|
||||
VALUES (?, 1, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
|
||||
ON CONFLICT(anime_id) DO UPDATE SET
|
||||
total_sessions = total_sessions + 1,
|
||||
total_active_ms = total_active_ms + excluded.total_active_ms,
|
||||
total_cards = total_cards + excluded.total_cards,
|
||||
total_lines_seen = total_lines_seen + excluded.total_lines_seen,
|
||||
total_tokens_seen = total_tokens_seen + excluded.total_tokens_seen,
|
||||
episodes_started = episodes_started + excluded.episodes_started,
|
||||
episodes_completed = episodes_completed + excluded.episodes_completed,
|
||||
first_watched_ms = CASE
|
||||
WHEN excluded.first_watched_ms IS NULL THEN first_watched_ms
|
||||
WHEN first_watched_ms IS NULL THEN excluded.first_watched_ms
|
||||
WHEN excluded.first_watched_ms < first_watched_ms THEN excluded.first_watched_ms
|
||||
ELSE first_watched_ms
|
||||
END,
|
||||
last_watched_ms = CASE
|
||||
WHEN excluded.last_watched_ms IS NULL THEN last_watched_ms
|
||||
WHEN last_watched_ms IS NULL THEN excluded.last_watched_ms
|
||||
WHEN excluded.last_watched_ms > last_watched_ms THEN excluded.last_watched_ms
|
||||
ELSE last_watched_ms
|
||||
END,
|
||||
LAST_UPDATE_DATE = excluded.LAST_UPDATE_DATE
|
||||
`,
|
||||
).run(
|
||||
animeId,
|
||||
activeMs,
|
||||
cardsMined,
|
||||
linesSeen,
|
||||
tokensSeen,
|
||||
episodesStartedDelta,
|
||||
episodesCompletedDelta,
|
||||
startedAtMs,
|
||||
endedAtMs,
|
||||
nowMs,
|
||||
nowMs,
|
||||
);
|
||||
}
|
||||
|
||||
export function applySessionLifetimeSummary(
|
||||
db: DatabaseSync,
|
||||
session: SessionState,
|
||||
endedAtMs: number,
|
||||
): void {
|
||||
const applyResult = db
|
||||
.prepare(
|
||||
`
|
||||
INSERT INTO imm_lifetime_applied_sessions (
|
||||
session_id,
|
||||
applied_at_ms,
|
||||
CREATED_DATE,
|
||||
LAST_UPDATE_DATE
|
||||
) VALUES (
|
||||
?, ?, ?, ?
|
||||
)
|
||||
ON CONFLICT(session_id) DO NOTHING
|
||||
`,
|
||||
)
|
||||
.run(session.sessionId, endedAtMs, Date.now(), Date.now());
|
||||
|
||||
if ((applyResult.changes ?? 0) <= 0) {
|
||||
return;
|
||||
}
|
||||
|
||||
const telemetry = db
|
||||
.prepare(
|
||||
`
|
||||
SELECT
|
||||
active_watched_ms,
|
||||
cards_mined,
|
||||
lines_seen,
|
||||
tokens_seen
|
||||
FROM imm_session_telemetry
|
||||
WHERE session_id = ?
|
||||
ORDER BY sample_ms DESC, telemetry_id DESC
|
||||
LIMIT 1
|
||||
`,
|
||||
)
|
||||
.get(session.sessionId) as TelemetryRow | null;
|
||||
|
||||
const video = db
|
||||
.prepare('SELECT anime_id, watched FROM imm_videos WHERE video_id = ?')
|
||||
.get(session.videoId) as VideoRow | null;
|
||||
const mediaLifetime =
|
||||
(db
|
||||
.prepare('SELECT completed FROM imm_lifetime_media WHERE video_id = ?')
|
||||
.get(session.videoId) as LifetimeMediaStateRow | null | undefined) ?? null;
|
||||
const animeLifetime = video?.anime_id
|
||||
? ((db
|
||||
.prepare('SELECT episodes_completed FROM imm_lifetime_anime WHERE anime_id = ?')
|
||||
.get(video.anime_id) as LifetimeAnimeStateRow | null | undefined) ?? null)
|
||||
: null;
|
||||
const anime = video?.anime_id
|
||||
? ((db
|
||||
.prepare('SELECT episodes_total FROM imm_anime WHERE anime_id = ?')
|
||||
.get(video.anime_id) as AnimeRow | null | undefined) ?? null)
|
||||
: null;
|
||||
|
||||
const activeMs = telemetry
|
||||
? asPositiveNumber(telemetry.active_watched_ms, session.activeWatchedMs)
|
||||
: session.activeWatchedMs;
|
||||
const cardsMined = telemetry
|
||||
? asPositiveNumber(telemetry.cards_mined, session.cardsMined)
|
||||
: session.cardsMined;
|
||||
const linesSeen = telemetry
|
||||
? asPositiveNumber(telemetry.lines_seen, session.linesSeen)
|
||||
: session.linesSeen;
|
||||
const tokensSeen = telemetry
|
||||
? asPositiveNumber(telemetry.tokens_seen, session.tokensSeen)
|
||||
: session.tokensSeen;
|
||||
const watched = video?.watched ?? 0;
|
||||
const isFirstSessionForVideoRun =
|
||||
mediaLifetime === null &&
|
||||
!hasRetainedPriorSession(db, session.videoId, session.startedAtMs, session.sessionId);
|
||||
const isFirstCompletedSessionForVideoRun =
|
||||
watched > 0 && Number(mediaLifetime?.completed ?? 0) <= 0;
|
||||
const isFirstSessionForDay = isFirstSessionForLocalDay(
|
||||
db,
|
||||
session.sessionId,
|
||||
session.startedAtMs,
|
||||
);
|
||||
const episodesCompletedBefore = Number(animeLifetime?.episodes_completed ?? 0);
|
||||
const animeEpisodesTotal = anime?.episodes_total ?? null;
|
||||
const animeCompletedDelta =
|
||||
watched > 0 &&
|
||||
isFirstCompletedSessionForVideoRun &&
|
||||
animeEpisodesTotal !== null &&
|
||||
animeEpisodesTotal > 0 &&
|
||||
episodesCompletedBefore < animeEpisodesTotal &&
|
||||
episodesCompletedBefore + 1 >= animeEpisodesTotal
|
||||
? 1
|
||||
: 0;
|
||||
|
||||
const nowMs = Date.now();
|
||||
db.prepare(
|
||||
`
|
||||
UPDATE imm_lifetime_global
|
||||
SET
|
||||
total_sessions = total_sessions + 1,
|
||||
total_active_ms = total_active_ms + ?,
|
||||
total_cards = total_cards + ?,
|
||||
active_days = active_days + ?,
|
||||
episodes_started = episodes_started + ?,
|
||||
episodes_completed = episodes_completed + ?,
|
||||
anime_completed = anime_completed + ?,
|
||||
LAST_UPDATE_DATE = ?
|
||||
WHERE global_id = 1
|
||||
`,
|
||||
).run(
|
||||
activeMs,
|
||||
cardsMined,
|
||||
isFirstSessionForDay ? 1 : 0,
|
||||
isFirstSessionForVideoRun ? 1 : 0,
|
||||
isFirstCompletedSessionForVideoRun ? 1 : 0,
|
||||
animeCompletedDelta,
|
||||
nowMs,
|
||||
);
|
||||
|
||||
upsertLifetimeMedia(
|
||||
db,
|
||||
session.videoId,
|
||||
nowMs,
|
||||
activeMs,
|
||||
cardsMined,
|
||||
linesSeen,
|
||||
tokensSeen,
|
||||
watched > 0 ? 1 : 0,
|
||||
session.startedAtMs,
|
||||
endedAtMs,
|
||||
);
|
||||
|
||||
if (video?.anime_id) {
|
||||
upsertLifetimeAnime(
|
||||
db,
|
||||
video.anime_id,
|
||||
nowMs,
|
||||
activeMs,
|
||||
cardsMined,
|
||||
linesSeen,
|
||||
tokensSeen,
|
||||
isFirstSessionForVideoRun ? 1 : 0,
|
||||
isFirstCompletedSessionForVideoRun ? 1 : 0,
|
||||
session.startedAtMs,
|
||||
endedAtMs,
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
export function rebuildLifetimeSummaries(db: DatabaseSync): LifetimeRebuildSummary {
|
||||
const rebuiltAtMs = Date.now();
|
||||
const sessions = db
|
||||
.prepare(
|
||||
`
|
||||
SELECT
|
||||
session_id AS sessionId,
|
||||
video_id AS videoId,
|
||||
started_at_ms AS startedAtMs,
|
||||
ended_at_ms AS endedAtMs,
|
||||
total_watched_ms AS totalWatchedMs,
|
||||
active_watched_ms AS activeWatchedMs,
|
||||
lines_seen AS linesSeen,
|
||||
tokens_seen AS tokensSeen,
|
||||
cards_mined AS cardsMined,
|
||||
lookup_count AS lookupCount,
|
||||
lookup_hits AS lookupHits,
|
||||
yomitan_lookup_count AS yomitanLookupCount,
|
||||
pause_count AS pauseCount,
|
||||
pause_ms AS pauseMs,
|
||||
seek_forward_count AS seekForwardCount,
|
||||
seek_backward_count AS seekBackwardCount,
|
||||
media_buffer_events AS mediaBufferEvents
|
||||
FROM imm_sessions
|
||||
WHERE ended_at_ms IS NOT NULL
|
||||
ORDER BY started_at_ms ASC, session_id ASC
|
||||
`,
|
||||
)
|
||||
.all() as RetainedSessionRow[];
|
||||
|
||||
db.exec('BEGIN');
|
||||
try {
|
||||
resetLifetimeSummaries(db, rebuiltAtMs);
|
||||
for (const session of sessions) {
|
||||
applySessionLifetimeSummary(db, toRebuildSessionState(session), session.endedAtMs);
|
||||
}
|
||||
db.exec('COMMIT');
|
||||
} catch (error) {
|
||||
db.exec('ROLLBACK');
|
||||
throw error;
|
||||
}
|
||||
|
||||
return {
|
||||
appliedSessions: sessions.length,
|
||||
rebuiltAtMs,
|
||||
};
|
||||
}
|
||||
|
||||
export function reconcileStaleActiveSessions(db: DatabaseSync): number {
|
||||
const sessions = getRetainedStaleActiveSessions(db);
|
||||
if (sessions.length === 0) {
|
||||
return 0;
|
||||
}
|
||||
|
||||
db.exec('BEGIN');
|
||||
try {
|
||||
for (const session of sessions) {
|
||||
const state = toRebuildSessionState(session);
|
||||
finalizeSessionRecord(db, state, session.endedAtMs);
|
||||
applySessionLifetimeSummary(db, state, session.endedAtMs);
|
||||
}
|
||||
db.exec('COMMIT');
|
||||
} catch (error) {
|
||||
db.exec('ROLLBACK');
|
||||
throw error;
|
||||
}
|
||||
|
||||
return sessions.length;
|
||||
}
|
||||
|
||||
export function shouldBackfillLifetimeSummaries(db: DatabaseSync): boolean {
|
||||
const globalRow = db
|
||||
.prepare('SELECT total_sessions AS totalSessions FROM imm_lifetime_global WHERE global_id = 1')
|
||||
.get() as { totalSessions: number } | null;
|
||||
const appliedRow = db
|
||||
.prepare('SELECT COUNT(*) AS count FROM imm_lifetime_applied_sessions')
|
||||
.get() as ExistenceRow | null;
|
||||
const endedRow = db
|
||||
.prepare('SELECT COUNT(*) AS count FROM imm_sessions WHERE ended_at_ms IS NOT NULL')
|
||||
.get() as ExistenceRow | null;
|
||||
|
||||
const totalSessions = Number(globalRow?.totalSessions ?? 0);
|
||||
const appliedSessions = Number(appliedRow?.count ?? 0);
|
||||
const retainedEndedSessions = Number(endedRow?.count ?? 0);
|
||||
|
||||
return retainedEndedSessions > 0 && (appliedSessions === 0 || totalSessions === 0);
|
||||
}
|
||||
200
src/core/services/immersion-tracker/maintenance.test.ts
Normal file
200
src/core/services/immersion-tracker/maintenance.test.ts
Normal file
@@ -0,0 +1,200 @@
|
||||
import test from 'node:test';
|
||||
import assert from 'node:assert/strict';
|
||||
import fs from 'node:fs';
|
||||
import os from 'node:os';
|
||||
import path from 'node:path';
|
||||
import { Database } from './sqlite';
|
||||
import {
|
||||
pruneRawRetention,
|
||||
pruneRollupRetention,
|
||||
runOptimizeMaintenance,
|
||||
toMonthKey,
|
||||
} from './maintenance';
|
||||
import { ensureSchema } from './storage';
|
||||
|
||||
function makeDbPath(): string {
|
||||
const dir = fs.mkdtempSync(path.join(os.tmpdir(), 'subminer-maintenance-test-'));
|
||||
return path.join(dir, 'tracker.db');
|
||||
}
|
||||
|
||||
function cleanupDbPath(dbPath: string): void {
|
||||
try {
|
||||
fs.rmSync(path.dirname(dbPath), { recursive: true, force: true });
|
||||
} catch {
|
||||
// best effort
|
||||
}
|
||||
}
|
||||
|
||||
test('pruneRawRetention uses session retention separately from telemetry retention', () => {
|
||||
const dbPath = makeDbPath();
|
||||
const db = new Database(dbPath);
|
||||
|
||||
try {
|
||||
ensureSchema(db);
|
||||
const nowMs = 90 * 86_400_000;
|
||||
const staleEndedAtMs = nowMs - 40 * 86_400_000;
|
||||
const keptEndedAtMs = nowMs - 5 * 86_400_000;
|
||||
|
||||
db.exec(`
|
||||
INSERT INTO imm_videos (
|
||||
video_id, video_key, canonical_title, source_type, duration_ms, CREATED_DATE, LAST_UPDATE_DATE
|
||||
) VALUES (
|
||||
1, 'local:/tmp/video.mkv', 'Video', 1, 0, ${nowMs}, ${nowMs}
|
||||
);
|
||||
INSERT INTO imm_sessions (
|
||||
session_id, session_uuid, video_id, started_at_ms, ended_at_ms, status, CREATED_DATE, LAST_UPDATE_DATE
|
||||
) VALUES
|
||||
(1, 'session-1', 1, ${staleEndedAtMs - 1_000}, ${staleEndedAtMs}, 2, ${staleEndedAtMs}, ${staleEndedAtMs}),
|
||||
(2, 'session-2', 1, ${keptEndedAtMs - 1_000}, ${keptEndedAtMs}, 2, ${keptEndedAtMs}, ${keptEndedAtMs});
|
||||
INSERT INTO imm_session_telemetry (
|
||||
session_id, sample_ms, total_watched_ms, active_watched_ms, CREATED_DATE, LAST_UPDATE_DATE
|
||||
) VALUES
|
||||
(1, ${nowMs - 2 * 86_400_000}, 0, 0, ${nowMs}, ${nowMs}),
|
||||
(2, ${nowMs - 12 * 60 * 60 * 1000}, 0, 0, ${nowMs}, ${nowMs});
|
||||
`);
|
||||
|
||||
const result = pruneRawRetention(db, nowMs, {
|
||||
eventsRetentionMs: 7 * 86_400_000,
|
||||
telemetryRetentionMs: 1 * 86_400_000,
|
||||
sessionsRetentionMs: 30 * 86_400_000,
|
||||
});
|
||||
|
||||
const remainingSessions = db
|
||||
.prepare('SELECT session_id FROM imm_sessions ORDER BY session_id')
|
||||
.all() as Array<{ session_id: number }>;
|
||||
const remainingTelemetry = db
|
||||
.prepare('SELECT session_id FROM imm_session_telemetry ORDER BY session_id')
|
||||
.all() as Array<{ session_id: number }>;
|
||||
|
||||
assert.equal(result.deletedTelemetryRows, 1);
|
||||
assert.equal(result.deletedEndedSessions, 1);
|
||||
assert.deepEqual(
|
||||
remainingSessions.map((row) => row.session_id),
|
||||
[2],
|
||||
);
|
||||
assert.deepEqual(
|
||||
remainingTelemetry.map((row) => row.session_id),
|
||||
[2],
|
||||
);
|
||||
} finally {
|
||||
db.close();
|
||||
cleanupDbPath(dbPath);
|
||||
}
|
||||
});
|
||||
|
||||
test('raw retention keeps rollups and rollup retention prunes them separately', () => {
|
||||
const dbPath = makeDbPath();
|
||||
const db = new Database(dbPath);
|
||||
|
||||
try {
|
||||
ensureSchema(db);
|
||||
const nowMs = Date.UTC(2026, 2, 16, 12, 0, 0, 0);
|
||||
const oldDay = Math.floor((nowMs - 90 * 86_400_000) / 86_400_000);
|
||||
const oldMonth = toMonthKey(nowMs - 400 * 86_400_000);
|
||||
|
||||
db.exec(`
|
||||
INSERT INTO imm_videos (
|
||||
video_id, video_key, canonical_title, source_type, duration_ms, CREATED_DATE, LAST_UPDATE_DATE
|
||||
) VALUES (
|
||||
1, 'local:/tmp/video.mkv', 'Video', 1, 0, ${nowMs}, ${nowMs}
|
||||
);
|
||||
INSERT INTO imm_sessions (
|
||||
session_id, session_uuid, video_id, started_at_ms, ended_at_ms, status, CREATED_DATE, LAST_UPDATE_DATE
|
||||
) VALUES (
|
||||
1, 'session-1', 1, ${nowMs - 90 * 86_400_000}, ${nowMs - 90 * 86_400_000 + 1_000}, 2, ${nowMs}, ${nowMs}
|
||||
);
|
||||
INSERT INTO imm_session_telemetry (
|
||||
session_id, sample_ms, total_watched_ms, active_watched_ms, CREATED_DATE, LAST_UPDATE_DATE
|
||||
) VALUES (
|
||||
1, ${nowMs - 90 * 86_400_000}, 0, 0, ${nowMs}, ${nowMs}
|
||||
);
|
||||
INSERT INTO imm_daily_rollups (
|
||||
rollup_day, video_id, total_sessions, total_active_min, total_lines_seen,
|
||||
total_tokens_seen, total_cards
|
||||
) VALUES (
|
||||
${oldDay}, 1, 1, 10, 1, 1, 1
|
||||
);
|
||||
INSERT INTO imm_monthly_rollups (
|
||||
rollup_month, video_id, total_sessions, total_active_min, total_lines_seen,
|
||||
total_tokens_seen, total_cards, CREATED_DATE, LAST_UPDATE_DATE
|
||||
) VALUES (
|
||||
${oldMonth}, 1, 1, 10, 1, 1, 1, ${nowMs}, ${nowMs}
|
||||
);
|
||||
`);
|
||||
|
||||
pruneRawRetention(db, nowMs, {
|
||||
eventsRetentionMs: 7 * 86_400_000,
|
||||
telemetryRetentionMs: 30 * 86_400_000,
|
||||
sessionsRetentionMs: 30 * 86_400_000,
|
||||
});
|
||||
|
||||
const rollupsAfterRawPrune = db
|
||||
.prepare('SELECT COUNT(*) AS total FROM imm_daily_rollups')
|
||||
.get() as { total: number } | null;
|
||||
const monthlyAfterRawPrune = db
|
||||
.prepare('SELECT COUNT(*) AS total FROM imm_monthly_rollups')
|
||||
.get() as { total: number } | null;
|
||||
|
||||
assert.equal(rollupsAfterRawPrune?.total, 1);
|
||||
assert.equal(monthlyAfterRawPrune?.total, 1);
|
||||
|
||||
const rollupPrune = pruneRollupRetention(db, nowMs, {
|
||||
dailyRollupRetentionMs: 30 * 86_400_000,
|
||||
monthlyRollupRetentionMs: 365 * 86_400_000,
|
||||
});
|
||||
|
||||
const rollupsAfterRollupPrune = db
|
||||
.prepare('SELECT COUNT(*) AS total FROM imm_daily_rollups')
|
||||
.get() as { total: number } | null;
|
||||
const monthlyAfterRollupPrune = db
|
||||
.prepare('SELECT COUNT(*) AS total FROM imm_monthly_rollups')
|
||||
.get() as { total: number } | null;
|
||||
|
||||
assert.equal(rollupPrune.deletedDailyRows, 1);
|
||||
assert.equal(rollupPrune.deletedMonthlyRows, 1);
|
||||
assert.equal(rollupsAfterRollupPrune?.total, 0);
|
||||
assert.equal(monthlyAfterRollupPrune?.total, 0);
|
||||
} finally {
|
||||
db.close();
|
||||
cleanupDbPath(dbPath);
|
||||
}
|
||||
});
|
||||
|
||||
test('ensureSchema adds sample_ms index for telemetry rollup scans', () => {
|
||||
const dbPath = makeDbPath();
|
||||
const db = new Database(dbPath);
|
||||
|
||||
try {
|
||||
ensureSchema(db);
|
||||
const indexes = db.prepare("PRAGMA index_list('imm_session_telemetry')").all() as Array<{
|
||||
name: string;
|
||||
}>;
|
||||
const hasSampleMsIndex = indexes.some((row) => row.name === 'idx_telemetry_sample_ms');
|
||||
assert.equal(hasSampleMsIndex, true);
|
||||
|
||||
const indexColumns = db.prepare("PRAGMA index_info('idx_telemetry_sample_ms')").all() as Array<{
|
||||
name: string;
|
||||
}>;
|
||||
assert.deepEqual(
|
||||
indexColumns.map((column) => column.name),
|
||||
['sample_ms'],
|
||||
);
|
||||
} finally {
|
||||
db.close();
|
||||
cleanupDbPath(dbPath);
|
||||
}
|
||||
});
|
||||
|
||||
test('runOptimizeMaintenance executes PRAGMA optimize', () => {
|
||||
const executedSql: string[] = [];
|
||||
const db = {
|
||||
exec(source: string) {
|
||||
executedSql.push(source);
|
||||
return this;
|
||||
},
|
||||
} as unknown as Parameters<typeof runOptimizeMaintenance>[0];
|
||||
|
||||
runOptimizeMaintenance(db);
|
||||
|
||||
assert.deepEqual(executedSql, ['PRAGMA optimize']);
|
||||
});
|
||||
@@ -18,11 +18,9 @@ interface RollupTelemetryResult {
|
||||
maxSampleMs: number | null;
|
||||
}
|
||||
|
||||
interface RetentionResult {
|
||||
interface RawRetentionResult {
|
||||
deletedSessionEvents: number;
|
||||
deletedTelemetryRows: number;
|
||||
deletedDailyRows: number;
|
||||
deletedMonthlyRows: number;
|
||||
deletedEndedSessions: number;
|
||||
}
|
||||
|
||||
@@ -31,20 +29,18 @@ export function toMonthKey(timestampMs: number): number {
|
||||
return monthDate.getUTCFullYear() * 100 + monthDate.getUTCMonth() + 1;
|
||||
}
|
||||
|
||||
export function pruneRetention(
|
||||
export function pruneRawRetention(
|
||||
db: DatabaseSync,
|
||||
nowMs: number,
|
||||
policy: {
|
||||
eventsRetentionMs: number;
|
||||
telemetryRetentionMs: number;
|
||||
dailyRollupRetentionMs: number;
|
||||
monthlyRollupRetentionMs: number;
|
||||
sessionsRetentionMs: number;
|
||||
},
|
||||
): RetentionResult {
|
||||
): RawRetentionResult {
|
||||
const eventCutoff = nowMs - policy.eventsRetentionMs;
|
||||
const telemetryCutoff = nowMs - policy.telemetryRetentionMs;
|
||||
const dayCutoff = nowMs - policy.dailyRollupRetentionMs;
|
||||
const monthCutoff = nowMs - policy.monthlyRollupRetentionMs;
|
||||
const sessionsCutoff = nowMs - policy.sessionsRetentionMs;
|
||||
|
||||
const deletedSessionEvents = (
|
||||
db.prepare(`DELETE FROM imm_session_events WHERE ts_ms < ?`).run(eventCutoff) as {
|
||||
@@ -56,28 +52,49 @@ export function pruneRetention(
|
||||
changes: number;
|
||||
}
|
||||
).changes;
|
||||
const deletedDailyRows = (
|
||||
db
|
||||
.prepare(`DELETE FROM imm_daily_rollups WHERE rollup_day < ?`)
|
||||
.run(Math.floor(dayCutoff / DAILY_MS)) as { changes: number }
|
||||
).changes;
|
||||
const deletedMonthlyRows = (
|
||||
db
|
||||
.prepare(`DELETE FROM imm_monthly_rollups WHERE rollup_month < ?`)
|
||||
.run(toMonthKey(monthCutoff)) as { changes: number }
|
||||
).changes;
|
||||
const deletedEndedSessions = (
|
||||
db
|
||||
.prepare(`DELETE FROM imm_sessions WHERE ended_at_ms IS NOT NULL AND ended_at_ms < ?`)
|
||||
.run(telemetryCutoff) as { changes: number }
|
||||
.run(sessionsCutoff) as { changes: number }
|
||||
).changes;
|
||||
|
||||
return {
|
||||
deletedSessionEvents,
|
||||
deletedTelemetryRows,
|
||||
deletedEndedSessions,
|
||||
};
|
||||
}
|
||||
|
||||
export function pruneRollupRetention(
|
||||
db: DatabaseSync,
|
||||
nowMs: number,
|
||||
policy: {
|
||||
dailyRollupRetentionMs: number;
|
||||
monthlyRollupRetentionMs: number;
|
||||
},
|
||||
): { deletedDailyRows: number; deletedMonthlyRows: number } {
|
||||
const deletedDailyRows = Number.isFinite(policy.dailyRollupRetentionMs)
|
||||
? (
|
||||
db
|
||||
.prepare(`DELETE FROM imm_daily_rollups WHERE rollup_day < ?`)
|
||||
.run(Math.floor((nowMs - policy.dailyRollupRetentionMs) / DAILY_MS)) as {
|
||||
changes: number;
|
||||
}
|
||||
).changes
|
||||
: 0;
|
||||
const deletedMonthlyRows = Number.isFinite(policy.monthlyRollupRetentionMs)
|
||||
? (
|
||||
db
|
||||
.prepare(`DELETE FROM imm_monthly_rollups WHERE rollup_month < ?`)
|
||||
.run(toMonthKey(nowMs - policy.monthlyRollupRetentionMs)) as {
|
||||
changes: number;
|
||||
}
|
||||
).changes
|
||||
: 0;
|
||||
|
||||
return {
|
||||
deletedDailyRows,
|
||||
deletedMonthlyRows,
|
||||
deletedEndedSessions,
|
||||
};
|
||||
}
|
||||
|
||||
@@ -108,49 +125,57 @@ function upsertDailyRollupsForGroups(
|
||||
const upsertStmt = db.prepare(`
|
||||
INSERT INTO imm_daily_rollups (
|
||||
rollup_day, video_id, total_sessions, total_active_min, total_lines_seen,
|
||||
total_words_seen, total_tokens_seen, total_cards, cards_per_hour,
|
||||
words_per_min, lookup_hit_rate, CREATED_DATE, LAST_UPDATE_DATE
|
||||
total_tokens_seen, total_cards, cards_per_hour,
|
||||
tokens_per_min, lookup_hit_rate, CREATED_DATE, LAST_UPDATE_DATE
|
||||
)
|
||||
SELECT
|
||||
CAST(s.started_at_ms / 86400000 AS INTEGER) AS rollup_day,
|
||||
CAST(julianday(s.started_at_ms / 1000, 'unixepoch', 'localtime') - 2440587.5 AS INTEGER) AS rollup_day,
|
||||
s.video_id AS video_id,
|
||||
COUNT(DISTINCT s.session_id) AS total_sessions,
|
||||
COALESCE(SUM(t.active_watched_ms), 0) / 60000.0 AS total_active_min,
|
||||
COALESCE(SUM(t.lines_seen), 0) AS total_lines_seen,
|
||||
COALESCE(SUM(t.words_seen), 0) AS total_words_seen,
|
||||
COALESCE(SUM(t.tokens_seen), 0) AS total_tokens_seen,
|
||||
COALESCE(SUM(t.cards_mined), 0) AS total_cards,
|
||||
COALESCE(SUM(sm.max_active_ms), 0) / 60000.0 AS total_active_min,
|
||||
COALESCE(SUM(sm.max_lines), 0) AS total_lines_seen,
|
||||
COALESCE(SUM(sm.max_tokens), 0) AS total_tokens_seen,
|
||||
COALESCE(SUM(sm.max_cards), 0) AS total_cards,
|
||||
CASE
|
||||
WHEN COALESCE(SUM(t.active_watched_ms), 0) > 0
|
||||
THEN (COALESCE(SUM(t.cards_mined), 0) * 60.0) / (COALESCE(SUM(t.active_watched_ms), 0) / 60000.0)
|
||||
WHEN COALESCE(SUM(sm.max_active_ms), 0) > 0
|
||||
THEN (COALESCE(SUM(sm.max_cards), 0) * 60.0) / (COALESCE(SUM(sm.max_active_ms), 0) / 60000.0)
|
||||
ELSE NULL
|
||||
END AS cards_per_hour,
|
||||
CASE
|
||||
WHEN COALESCE(SUM(t.active_watched_ms), 0) > 0
|
||||
THEN COALESCE(SUM(t.words_seen), 0) / (COALESCE(SUM(t.active_watched_ms), 0) / 60000.0)
|
||||
WHEN COALESCE(SUM(sm.max_active_ms), 0) > 0
|
||||
THEN COALESCE(SUM(sm.max_tokens), 0) / (COALESCE(SUM(sm.max_active_ms), 0) / 60000.0)
|
||||
ELSE NULL
|
||||
END AS words_per_min,
|
||||
END AS tokens_per_min,
|
||||
CASE
|
||||
WHEN COALESCE(SUM(t.lookup_count), 0) > 0
|
||||
THEN CAST(COALESCE(SUM(t.lookup_hits), 0) AS REAL) / CAST(SUM(t.lookup_count) AS REAL)
|
||||
WHEN COALESCE(SUM(sm.max_lookups), 0) > 0
|
||||
THEN CAST(COALESCE(SUM(sm.max_hits), 0) AS REAL) / CAST(SUM(sm.max_lookups) AS REAL)
|
||||
ELSE NULL
|
||||
END AS lookup_hit_rate,
|
||||
? AS CREATED_DATE,
|
||||
? AS LAST_UPDATE_DATE
|
||||
FROM imm_sessions s
|
||||
JOIN imm_session_telemetry t
|
||||
ON t.session_id = s.session_id
|
||||
WHERE CAST(s.started_at_ms / 86400000 AS INTEGER) = ? AND s.video_id = ?
|
||||
JOIN (
|
||||
SELECT
|
||||
t.session_id,
|
||||
MAX(t.active_watched_ms) AS max_active_ms,
|
||||
MAX(t.lines_seen) AS max_lines,
|
||||
MAX(t.tokens_seen) AS max_tokens,
|
||||
MAX(t.cards_mined) AS max_cards,
|
||||
MAX(t.lookup_count) AS max_lookups,
|
||||
MAX(t.lookup_hits) AS max_hits
|
||||
FROM imm_session_telemetry t
|
||||
GROUP BY t.session_id
|
||||
) sm ON s.session_id = sm.session_id
|
||||
WHERE CAST(julianday(s.started_at_ms / 1000, 'unixepoch', 'localtime') - 2440587.5 AS INTEGER) = ? AND s.video_id = ?
|
||||
GROUP BY rollup_day, s.video_id
|
||||
ON CONFLICT (rollup_day, video_id) DO UPDATE SET
|
||||
total_sessions = excluded.total_sessions,
|
||||
total_active_min = excluded.total_active_min,
|
||||
total_lines_seen = excluded.total_lines_seen,
|
||||
total_words_seen = excluded.total_words_seen,
|
||||
total_tokens_seen = excluded.total_tokens_seen,
|
||||
total_cards = excluded.total_cards,
|
||||
cards_per_hour = excluded.cards_per_hour,
|
||||
words_per_min = excluded.words_per_min,
|
||||
tokens_per_min = excluded.tokens_per_min,
|
||||
lookup_hit_rate = excluded.lookup_hit_rate,
|
||||
CREATED_DATE = COALESCE(imm_daily_rollups.CREATED_DATE, excluded.CREATED_DATE),
|
||||
LAST_UPDATE_DATE = excluded.LAST_UPDATE_DATE
|
||||
@@ -173,29 +198,35 @@ function upsertMonthlyRollupsForGroups(
|
||||
const upsertStmt = db.prepare(`
|
||||
INSERT INTO imm_monthly_rollups (
|
||||
rollup_month, video_id, total_sessions, total_active_min, total_lines_seen,
|
||||
total_words_seen, total_tokens_seen, total_cards, CREATED_DATE, LAST_UPDATE_DATE
|
||||
total_tokens_seen, total_cards, CREATED_DATE, LAST_UPDATE_DATE
|
||||
)
|
||||
SELECT
|
||||
CAST(strftime('%Y%m', s.started_at_ms / 1000, 'unixepoch') AS INTEGER) AS rollup_month,
|
||||
CAST(strftime('%Y%m', s.started_at_ms / 1000, 'unixepoch', 'localtime') AS INTEGER) AS rollup_month,
|
||||
s.video_id AS video_id,
|
||||
COUNT(DISTINCT s.session_id) AS total_sessions,
|
||||
COALESCE(SUM(t.active_watched_ms), 0) / 60000.0 AS total_active_min,
|
||||
COALESCE(SUM(t.lines_seen), 0) AS total_lines_seen,
|
||||
COALESCE(SUM(t.words_seen), 0) AS total_words_seen,
|
||||
COALESCE(SUM(t.tokens_seen), 0) AS total_tokens_seen,
|
||||
COALESCE(SUM(t.cards_mined), 0) AS total_cards,
|
||||
COALESCE(SUM(sm.max_active_ms), 0) / 60000.0 AS total_active_min,
|
||||
COALESCE(SUM(sm.max_lines), 0) AS total_lines_seen,
|
||||
COALESCE(SUM(sm.max_tokens), 0) AS total_tokens_seen,
|
||||
COALESCE(SUM(sm.max_cards), 0) AS total_cards,
|
||||
? AS CREATED_DATE,
|
||||
? AS LAST_UPDATE_DATE
|
||||
FROM imm_sessions s
|
||||
JOIN imm_session_telemetry t
|
||||
ON t.session_id = s.session_id
|
||||
WHERE CAST(strftime('%Y%m', s.started_at_ms / 1000, 'unixepoch') AS INTEGER) = ? AND s.video_id = ?
|
||||
JOIN (
|
||||
SELECT
|
||||
t.session_id,
|
||||
MAX(t.active_watched_ms) AS max_active_ms,
|
||||
MAX(t.lines_seen) AS max_lines,
|
||||
MAX(t.tokens_seen) AS max_tokens,
|
||||
MAX(t.cards_mined) AS max_cards
|
||||
FROM imm_session_telemetry t
|
||||
GROUP BY t.session_id
|
||||
) sm ON s.session_id = sm.session_id
|
||||
WHERE CAST(strftime('%Y%m', s.started_at_ms / 1000, 'unixepoch', 'localtime') AS INTEGER) = ? AND s.video_id = ?
|
||||
GROUP BY rollup_month, s.video_id
|
||||
ON CONFLICT (rollup_month, video_id) DO UPDATE SET
|
||||
total_sessions = excluded.total_sessions,
|
||||
total_active_min = excluded.total_active_min,
|
||||
total_lines_seen = excluded.total_lines_seen,
|
||||
total_words_seen = excluded.total_words_seen,
|
||||
total_tokens_seen = excluded.total_tokens_seen,
|
||||
total_cards = excluded.total_cards,
|
||||
CREATED_DATE = COALESCE(imm_monthly_rollups.CREATED_DATE, excluded.CREATED_DATE),
|
||||
@@ -216,8 +247,8 @@ function getAffectedRollupGroups(
|
||||
.prepare(
|
||||
`
|
||||
SELECT DISTINCT
|
||||
CAST(s.started_at_ms / 86400000 AS INTEGER) AS rollup_day,
|
||||
CAST(strftime('%Y%m', s.started_at_ms / 1000, 'unixepoch') AS INTEGER) AS rollup_month,
|
||||
CAST(julianday(s.started_at_ms / 1000, 'unixepoch', 'localtime') - 2440587.5 AS INTEGER) AS rollup_day,
|
||||
CAST(strftime('%Y%m', s.started_at_ms / 1000, 'unixepoch', 'localtime') AS INTEGER) AS rollup_month,
|
||||
s.video_id AS video_id
|
||||
FROM imm_session_telemetry t
|
||||
JOIN imm_sessions s
|
||||
@@ -292,3 +323,7 @@ export function runRollupMaintenance(db: DatabaseSync, forceRebuild = false): vo
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
|
||||
export function runOptimizeMaintenance(db: DatabaseSync): void {
|
||||
db.exec('PRAGMA optimize');
|
||||
}
|
||||
|
||||
@@ -4,7 +4,7 @@ import { EventEmitter } from 'node:events';
|
||||
import test from 'node:test';
|
||||
import type { spawn as spawnFn } from 'node:child_process';
|
||||
import { SOURCE_TYPE_LOCAL } from './types';
|
||||
import { getLocalVideoMetadata, runFfprobe } from './metadata';
|
||||
import { getLocalVideoMetadata, guessAnimeVideoMetadata, runFfprobe } from './metadata';
|
||||
|
||||
type Spawn = typeof spawnFn;
|
||||
|
||||
@@ -146,3 +146,83 @@ test('getLocalVideoMetadata derives title and falls back to null hash on read er
|
||||
assert.equal(hashFallbackMetadata.canonicalTitle, 'Episode 02');
|
||||
assert.equal(hashFallbackMetadata.hashSha256, null);
|
||||
});
|
||||
|
||||
test('guessAnimeVideoMetadata uses guessit basename output first when available', async () => {
|
||||
const seenTargets: string[] = [];
|
||||
const parsed = await guessAnimeVideoMetadata(
|
||||
'/tmp/Little Witch Academia S02E05.mkv',
|
||||
'Episode 5',
|
||||
{
|
||||
runGuessit: async (target) => {
|
||||
seenTargets.push(target);
|
||||
return JSON.stringify({
|
||||
title: 'Little Witch Academia',
|
||||
season: 2,
|
||||
episode: 5,
|
||||
});
|
||||
},
|
||||
},
|
||||
);
|
||||
|
||||
assert.deepEqual(seenTargets, ['Little Witch Academia S02E05.mkv']);
|
||||
assert.deepEqual(parsed, {
|
||||
parsedBasename: 'Little Witch Academia S02E05.mkv',
|
||||
parsedTitle: 'Little Witch Academia',
|
||||
parsedSeason: 2,
|
||||
parsedEpisode: 5,
|
||||
parserSource: 'guessit',
|
||||
parserConfidence: 1,
|
||||
parseMetadataJson: JSON.stringify({
|
||||
filename: 'Little Witch Academia S02E05.mkv',
|
||||
source: 'guessit',
|
||||
}),
|
||||
});
|
||||
});
|
||||
|
||||
test('guessAnimeVideoMetadata falls back to parser when guessit throws', async () => {
|
||||
const parsed = await guessAnimeVideoMetadata(
|
||||
'/tmp/Little Witch Academia S02E05.mkv',
|
||||
'Episode 5',
|
||||
{
|
||||
runGuessit: async () => {
|
||||
throw new Error('guessit unavailable');
|
||||
},
|
||||
},
|
||||
);
|
||||
|
||||
assert.deepEqual(parsed, {
|
||||
parsedBasename: 'Little Witch Academia S02E05.mkv',
|
||||
parsedTitle: 'Little Witch Academia',
|
||||
parsedSeason: 2,
|
||||
parsedEpisode: 5,
|
||||
parserSource: 'fallback',
|
||||
parserConfidence: 1,
|
||||
parseMetadataJson: JSON.stringify({
|
||||
confidence: 'high',
|
||||
filename: 'Little Witch Academia S02E05.mkv',
|
||||
rawTitle: 'Little Witch Academia S02E05',
|
||||
source: 'fallback',
|
||||
}),
|
||||
});
|
||||
});
|
||||
|
||||
test('guessAnimeVideoMetadata falls back when guessit output is incomplete', async () => {
|
||||
const parsed = await guessAnimeVideoMetadata('/tmp/[SubsPlease] Frieren - 03 (1080p).mkv', null, {
|
||||
runGuessit: async () => JSON.stringify({ episode: 3 }),
|
||||
});
|
||||
|
||||
assert.deepEqual(parsed, {
|
||||
parsedBasename: '[SubsPlease] Frieren - 03 (1080p).mkv',
|
||||
parsedTitle: 'Frieren - 03 (1080p)',
|
||||
parsedSeason: null,
|
||||
parsedEpisode: null,
|
||||
parserSource: 'fallback',
|
||||
parserConfidence: 0.2,
|
||||
parseMetadataJson: JSON.stringify({
|
||||
confidence: 'low',
|
||||
filename: '[SubsPlease] Frieren - 03 (1080p).mkv',
|
||||
rawTitle: 'Frieren - 03 (1080p)',
|
||||
source: 'fallback',
|
||||
}),
|
||||
});
|
||||
});
|
||||
|
||||
@@ -1,6 +1,13 @@
|
||||
import crypto from 'node:crypto';
|
||||
import { spawn as nodeSpawn } from 'node:child_process';
|
||||
import * as fs from 'node:fs';
|
||||
import path from 'node:path';
|
||||
import { parseMediaInfo } from '../../../jimaku/utils';
|
||||
import {
|
||||
guessAnilistMediaInfo,
|
||||
runGuessit,
|
||||
type GuessAnilistMediaInfoDeps,
|
||||
} from '../anilist/anilist-updater';
|
||||
import {
|
||||
deriveCanonicalTitle,
|
||||
emptyMetadata,
|
||||
@@ -8,7 +15,12 @@ import {
|
||||
parseFps,
|
||||
toNullableInt,
|
||||
} from './reducer';
|
||||
import { SOURCE_TYPE_LOCAL, type ProbeMetadata, type VideoMetadata } from './types';
|
||||
import {
|
||||
SOURCE_TYPE_LOCAL,
|
||||
type ParsedAnimeVideoGuess,
|
||||
type ProbeMetadata,
|
||||
type VideoMetadata,
|
||||
} from './types';
|
||||
|
||||
type SpawnFn = typeof nodeSpawn;
|
||||
|
||||
@@ -24,6 +36,21 @@ interface MetadataDeps {
|
||||
fs?: FsDeps;
|
||||
}
|
||||
|
||||
interface GuessAnimeVideoMetadataDeps {
|
||||
runGuessit?: GuessAnilistMediaInfoDeps['runGuessit'];
|
||||
}
|
||||
|
||||
function mapParserConfidenceToScore(confidence: 'high' | 'medium' | 'low'): number {
|
||||
switch (confidence) {
|
||||
case 'high':
|
||||
return 1;
|
||||
case 'medium':
|
||||
return 0.6;
|
||||
default:
|
||||
return 0.2;
|
||||
}
|
||||
}
|
||||
|
||||
export async function computeSha256(
|
||||
mediaPath: string,
|
||||
deps: MetadataDeps = {},
|
||||
@@ -151,3 +178,48 @@ export async function getLocalVideoMetadata(
|
||||
metadataJson: null,
|
||||
};
|
||||
}
|
||||
|
||||
export async function guessAnimeVideoMetadata(
|
||||
mediaPath: string | null,
|
||||
mediaTitle: string | null,
|
||||
deps: GuessAnimeVideoMetadataDeps = {},
|
||||
): Promise<ParsedAnimeVideoGuess | null> {
|
||||
const parsed = await guessAnilistMediaInfo(mediaPath, mediaTitle, {
|
||||
runGuessit: deps.runGuessit ?? runGuessit,
|
||||
});
|
||||
if (!parsed) {
|
||||
return null;
|
||||
}
|
||||
|
||||
const parsedBasename = mediaPath ? path.basename(mediaPath) : null;
|
||||
if (parsed.source === 'guessit') {
|
||||
return {
|
||||
parsedBasename,
|
||||
parsedTitle: parsed.title,
|
||||
parsedSeason: parsed.season,
|
||||
parsedEpisode: parsed.episode,
|
||||
parserSource: 'guessit',
|
||||
parserConfidence: 1,
|
||||
parseMetadataJson: JSON.stringify({
|
||||
filename: parsedBasename,
|
||||
source: 'guessit',
|
||||
}),
|
||||
};
|
||||
}
|
||||
|
||||
const fallbackInfo = parseMediaInfo(mediaPath ?? mediaTitle);
|
||||
return {
|
||||
parsedBasename: parsedBasename ?? fallbackInfo.filename ?? null,
|
||||
parsedTitle: parsed.title,
|
||||
parsedSeason: parsed.season,
|
||||
parsedEpisode: parsed.episode,
|
||||
parserSource: 'fallback',
|
||||
parserConfidence: mapParserConfidenceToScore(fallbackInfo.confidence),
|
||||
parseMetadataJson: JSON.stringify({
|
||||
confidence: fallbackInfo.confidence,
|
||||
filename: fallbackInfo.filename,
|
||||
rawTitle: fallbackInfo.rawTitle,
|
||||
source: 'fallback',
|
||||
}),
|
||||
};
|
||||
}
|
||||
|
||||
File diff suppressed because it is too large
Load Diff
@@ -15,11 +15,11 @@ export function createInitialSessionState(
|
||||
totalWatchedMs: 0,
|
||||
activeWatchedMs: 0,
|
||||
linesSeen: 0,
|
||||
wordsSeen: 0,
|
||||
tokensSeen: 0,
|
||||
cardsMined: 0,
|
||||
lookupCount: 0,
|
||||
lookupHits: 0,
|
||||
yomitanLookupCount: 0,
|
||||
pauseCount: 0,
|
||||
pauseMs: 0,
|
||||
seekForwardCount: 0,
|
||||
@@ -30,6 +30,7 @@ export function createInitialSessionState(
|
||||
lastPauseStartMs: null,
|
||||
isPaused: false,
|
||||
pendingTelemetry: true,
|
||||
markedWatched: false,
|
||||
};
|
||||
}
|
||||
|
||||
@@ -50,16 +51,6 @@ export function sanitizePayload(payload: Record<string, unknown>, maxPayloadByte
|
||||
return json.length <= maxPayloadBytes ? json : JSON.stringify({ truncated: true });
|
||||
}
|
||||
|
||||
export function calculateTextMetrics(value: string): {
|
||||
words: number;
|
||||
tokens: number;
|
||||
} {
|
||||
const words = value.split(/\s+/).filter(Boolean).length;
|
||||
const cjkCount = value.match(/[\u3040-\u30ff\u4e00-\u9fff]/g)?.length ?? 0;
|
||||
const tokens = Math.max(words, cjkCount);
|
||||
return { words, tokens };
|
||||
}
|
||||
|
||||
export function secToMs(seconds: number): number {
|
||||
const coerced = Number(seconds);
|
||||
if (!Number.isFinite(coerced)) return 0;
|
||||
|
||||
@@ -39,8 +39,41 @@ export function finalizeSessionRecord(
|
||||
SET
|
||||
ended_at_ms = ?,
|
||||
status = ?,
|
||||
ended_media_ms = ?,
|
||||
total_watched_ms = ?,
|
||||
active_watched_ms = ?,
|
||||
lines_seen = ?,
|
||||
tokens_seen = ?,
|
||||
cards_mined = ?,
|
||||
lookup_count = ?,
|
||||
lookup_hits = ?,
|
||||
yomitan_lookup_count = ?,
|
||||
pause_count = ?,
|
||||
pause_ms = ?,
|
||||
seek_forward_count = ?,
|
||||
seek_backward_count = ?,
|
||||
media_buffer_events = ?,
|
||||
LAST_UPDATE_DATE = ?
|
||||
WHERE session_id = ?
|
||||
`,
|
||||
).run(endedAtMs, SESSION_STATUS_ENDED, Date.now(), sessionState.sessionId);
|
||||
).run(
|
||||
endedAtMs,
|
||||
SESSION_STATUS_ENDED,
|
||||
sessionState.lastMediaMs,
|
||||
sessionState.totalWatchedMs,
|
||||
sessionState.activeWatchedMs,
|
||||
sessionState.linesSeen,
|
||||
sessionState.tokensSeen,
|
||||
sessionState.cardsMined,
|
||||
sessionState.lookupCount,
|
||||
sessionState.lookupHits,
|
||||
sessionState.yomitanLookupCount,
|
||||
sessionState.pauseCount,
|
||||
sessionState.pauseMs,
|
||||
sessionState.seekForwardCount,
|
||||
sessionState.seekBackwardCount,
|
||||
sessionState.mediaBufferEvents,
|
||||
Date.now(),
|
||||
sessionState.sessionId,
|
||||
);
|
||||
}
|
||||
|
||||
@@ -6,10 +6,15 @@ import test from 'node:test';
|
||||
import { Database } from './sqlite';
|
||||
import { finalizeSessionRecord, startSessionRecord } from './session';
|
||||
import {
|
||||
applyPragmas,
|
||||
createTrackerPreparedStatements,
|
||||
ensureSchema,
|
||||
executeQueuedWrite,
|
||||
normalizeCoverBlobBytes,
|
||||
parseCoverBlobReference,
|
||||
getOrCreateAnimeRecord,
|
||||
getOrCreateVideoRecord,
|
||||
linkVideoToAnimeRecord,
|
||||
} from './storage';
|
||||
import { EVENT_SUBTITLE_LINE, SESSION_STATUS_ENDED, SOURCE_TYPE_LOCAL } from './types';
|
||||
|
||||
@@ -46,6 +51,34 @@ function cleanupDbPath(dbPath: string): void {
|
||||
// libsql keeps Windows file handles alive after close when prepared statements were used.
|
||||
}
|
||||
|
||||
test('applyPragmas sets the SQLite tuning defaults used by immersion tracking', () => {
|
||||
const dbPath = makeDbPath();
|
||||
const db = new Database(dbPath);
|
||||
|
||||
try {
|
||||
applyPragmas(db);
|
||||
|
||||
const journalModeRow = db.prepare('PRAGMA journal_mode').get() as {
|
||||
journal_mode: string;
|
||||
};
|
||||
const synchronousRow = db.prepare('PRAGMA synchronous').get() as { synchronous: number };
|
||||
const foreignKeysRow = db.prepare('PRAGMA foreign_keys').get() as { foreign_keys: number };
|
||||
const busyTimeoutRow = db.prepare('PRAGMA busy_timeout').get() as { timeout: number };
|
||||
const journalSizeLimitRow = db.prepare('PRAGMA journal_size_limit').get() as {
|
||||
journal_size_limit: number;
|
||||
};
|
||||
|
||||
assert.equal(journalModeRow.journal_mode, 'wal');
|
||||
assert.equal(synchronousRow.synchronous, 1);
|
||||
assert.equal(foreignKeysRow.foreign_keys, 1);
|
||||
assert.equal(busyTimeoutRow.timeout, 2500);
|
||||
assert.equal(journalSizeLimitRow.journal_size_limit, 67_108_864);
|
||||
} finally {
|
||||
db.close();
|
||||
cleanupDbPath(dbPath);
|
||||
}
|
||||
});
|
||||
|
||||
test('ensureSchema creates immersion core tables', () => {
|
||||
const dbPath = makeDbPath();
|
||||
const db = new Database(dbPath);
|
||||
@@ -60,6 +93,7 @@ test('ensureSchema creates immersion core tables', () => {
|
||||
const tableNames = new Set(rows.map((row) => row.name));
|
||||
|
||||
assert.ok(tableNames.has('imm_videos'));
|
||||
assert.ok(tableNames.has('imm_anime'));
|
||||
assert.ok(tableNames.has('imm_sessions'));
|
||||
assert.ok(tableNames.has('imm_session_telemetry'));
|
||||
assert.ok(tableNames.has('imm_session_events'));
|
||||
@@ -67,7 +101,37 @@ test('ensureSchema creates immersion core tables', () => {
|
||||
assert.ok(tableNames.has('imm_monthly_rollups'));
|
||||
assert.ok(tableNames.has('imm_words'));
|
||||
assert.ok(tableNames.has('imm_kanji'));
|
||||
assert.ok(tableNames.has('imm_subtitle_lines'));
|
||||
assert.ok(tableNames.has('imm_word_line_occurrences'));
|
||||
assert.ok(tableNames.has('imm_kanji_line_occurrences'));
|
||||
assert.ok(tableNames.has('imm_rollup_state'));
|
||||
assert.ok(tableNames.has('imm_cover_art_blobs'));
|
||||
|
||||
const videoColumns = new Set(
|
||||
(
|
||||
db.prepare('PRAGMA table_info(imm_videos)').all() as Array<{
|
||||
name: string;
|
||||
}>
|
||||
).map((row) => row.name),
|
||||
);
|
||||
|
||||
assert.ok(videoColumns.has('anime_id'));
|
||||
assert.ok(videoColumns.has('parsed_basename'));
|
||||
assert.ok(videoColumns.has('parsed_title'));
|
||||
assert.ok(videoColumns.has('parsed_season'));
|
||||
assert.ok(videoColumns.has('parsed_episode'));
|
||||
assert.ok(videoColumns.has('parser_source'));
|
||||
assert.ok(videoColumns.has('parser_confidence'));
|
||||
assert.ok(videoColumns.has('parse_metadata_json'));
|
||||
|
||||
const mediaArtColumns = new Set(
|
||||
(
|
||||
db.prepare('PRAGMA table_info(imm_media_art)').all() as Array<{
|
||||
name: string;
|
||||
}>
|
||||
).map((row) => row.name),
|
||||
);
|
||||
assert.ok(mediaArtColumns.has('cover_blob_hash'));
|
||||
|
||||
const rollupStateRow = db
|
||||
.prepare('SELECT state_value FROM imm_rollup_state WHERE state_key = ?')
|
||||
@@ -82,6 +146,566 @@ test('ensureSchema creates immersion core tables', () => {
|
||||
}
|
||||
});
|
||||
|
||||
test('ensureSchema creates large-history performance indexes', () => {
|
||||
const dbPath = makeDbPath();
|
||||
const db = new Database(dbPath);
|
||||
|
||||
try {
|
||||
ensureSchema(db);
|
||||
const indexNames = new Set(
|
||||
(
|
||||
db
|
||||
.prepare(`SELECT name FROM sqlite_master WHERE type = 'index' AND name LIKE 'idx_%'`)
|
||||
.all() as Array<{
|
||||
name: string;
|
||||
}>
|
||||
).map((row) => row.name),
|
||||
);
|
||||
|
||||
assert.ok(indexNames.has('idx_telemetry_sample_ms'));
|
||||
assert.ok(indexNames.has('idx_sessions_started_at'));
|
||||
assert.ok(indexNames.has('idx_sessions_ended_at'));
|
||||
assert.ok(indexNames.has('idx_words_frequency'));
|
||||
assert.ok(indexNames.has('idx_kanji_frequency'));
|
||||
assert.ok(indexNames.has('idx_media_art_anilist_id'));
|
||||
assert.ok(indexNames.has('idx_media_art_cover_url'));
|
||||
} finally {
|
||||
db.close();
|
||||
cleanupDbPath(dbPath);
|
||||
}
|
||||
});
|
||||
|
||||
test('ensureSchema migrates legacy videos and backfills anime metadata from filenames', () => {
|
||||
const dbPath = makeDbPath();
|
||||
const db = new Database(dbPath);
|
||||
|
||||
try {
|
||||
db.exec(`
|
||||
CREATE TABLE imm_schema_version (
|
||||
schema_version INTEGER PRIMARY KEY,
|
||||
applied_at_ms INTEGER NOT NULL
|
||||
);
|
||||
INSERT INTO imm_schema_version(schema_version, applied_at_ms) VALUES (4, 1);
|
||||
|
||||
CREATE TABLE imm_videos(
|
||||
video_id INTEGER PRIMARY KEY AUTOINCREMENT,
|
||||
video_key TEXT NOT NULL UNIQUE,
|
||||
canonical_title TEXT NOT NULL,
|
||||
source_type INTEGER NOT NULL,
|
||||
source_path TEXT,
|
||||
source_url TEXT,
|
||||
duration_ms INTEGER NOT NULL CHECK(duration_ms>=0),
|
||||
file_size_bytes INTEGER CHECK(file_size_bytes>=0),
|
||||
codec_id INTEGER, container_id INTEGER,
|
||||
width_px INTEGER, height_px INTEGER, fps_x100 INTEGER,
|
||||
bitrate_kbps INTEGER, audio_codec_id INTEGER,
|
||||
hash_sha256 TEXT, screenshot_path TEXT,
|
||||
metadata_json TEXT,
|
||||
CREATED_DATE INTEGER,
|
||||
LAST_UPDATE_DATE INTEGER
|
||||
);
|
||||
`);
|
||||
|
||||
const insertLegacyVideo = db.prepare(`
|
||||
INSERT INTO imm_videos (
|
||||
video_key, canonical_title, source_type, source_path, source_url,
|
||||
duration_ms, file_size_bytes, codec_id, container_id, width_px, height_px,
|
||||
fps_x100, bitrate_kbps, audio_codec_id, hash_sha256, screenshot_path,
|
||||
metadata_json, CREATED_DATE, LAST_UPDATE_DATE
|
||||
) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
|
||||
`);
|
||||
|
||||
insertLegacyVideo.run(
|
||||
'local:/library/Little Witch Academia S02E05.mkv',
|
||||
'Episode 5',
|
||||
SOURCE_TYPE_LOCAL,
|
||||
'/library/Little Witch Academia S02E05.mkv',
|
||||
null,
|
||||
0,
|
||||
null,
|
||||
null,
|
||||
null,
|
||||
null,
|
||||
null,
|
||||
null,
|
||||
null,
|
||||
null,
|
||||
null,
|
||||
null,
|
||||
null,
|
||||
1,
|
||||
1,
|
||||
);
|
||||
insertLegacyVideo.run(
|
||||
'local:/library/Little Witch Academia S02E06.mkv',
|
||||
'Episode 6',
|
||||
SOURCE_TYPE_LOCAL,
|
||||
'/library/Little Witch Academia S02E06.mkv',
|
||||
null,
|
||||
0,
|
||||
null,
|
||||
null,
|
||||
null,
|
||||
null,
|
||||
null,
|
||||
null,
|
||||
null,
|
||||
null,
|
||||
null,
|
||||
null,
|
||||
null,
|
||||
1,
|
||||
1,
|
||||
);
|
||||
insertLegacyVideo.run(
|
||||
'local:/library/[SubsPlease] Frieren - 03 - Departure.mkv',
|
||||
'Episode 3',
|
||||
SOURCE_TYPE_LOCAL,
|
||||
'/library/[SubsPlease] Frieren - 03 - Departure.mkv',
|
||||
null,
|
||||
0,
|
||||
null,
|
||||
null,
|
||||
null,
|
||||
null,
|
||||
null,
|
||||
null,
|
||||
null,
|
||||
null,
|
||||
null,
|
||||
null,
|
||||
null,
|
||||
1,
|
||||
1,
|
||||
);
|
||||
|
||||
ensureSchema(db);
|
||||
|
||||
const videoColumns = new Set(
|
||||
(
|
||||
db.prepare('PRAGMA table_info(imm_videos)').all() as Array<{
|
||||
name: string;
|
||||
}>
|
||||
).map((row) => row.name),
|
||||
);
|
||||
assert.ok(videoColumns.has('anime_id'));
|
||||
assert.ok(videoColumns.has('parsed_basename'));
|
||||
assert.ok(videoColumns.has('parsed_title'));
|
||||
assert.ok(videoColumns.has('parsed_season'));
|
||||
assert.ok(videoColumns.has('parsed_episode'));
|
||||
assert.ok(videoColumns.has('parser_source'));
|
||||
assert.ok(videoColumns.has('parser_confidence'));
|
||||
assert.ok(videoColumns.has('parse_metadata_json'));
|
||||
|
||||
const animeRows = db
|
||||
.prepare('SELECT canonical_title FROM imm_anime ORDER BY canonical_title')
|
||||
.all() as Array<{ canonical_title: string }>;
|
||||
assert.deepEqual(
|
||||
animeRows.map((row) => row.canonical_title),
|
||||
['Frieren', 'Little Witch Academia'],
|
||||
);
|
||||
|
||||
const littleWitchRows = db
|
||||
.prepare(
|
||||
`
|
||||
SELECT
|
||||
a.canonical_title AS anime_title,
|
||||
v.parsed_title,
|
||||
v.parsed_basename,
|
||||
v.parsed_season,
|
||||
v.parsed_episode,
|
||||
v.parser_source,
|
||||
v.parser_confidence
|
||||
FROM imm_videos v
|
||||
JOIN imm_anime a ON a.anime_id = v.anime_id
|
||||
WHERE v.video_key LIKE 'local:/library/Little Witch Academia%'
|
||||
ORDER BY v.video_key
|
||||
`,
|
||||
)
|
||||
.all() as Array<{
|
||||
anime_title: string;
|
||||
parsed_title: string | null;
|
||||
parsed_basename: string | null;
|
||||
parsed_season: number | null;
|
||||
parsed_episode: number | null;
|
||||
parser_source: string | null;
|
||||
parser_confidence: number | null;
|
||||
}>;
|
||||
|
||||
assert.equal(littleWitchRows.length, 2);
|
||||
assert.deepEqual(
|
||||
littleWitchRows.map((row) => ({
|
||||
animeTitle: row.anime_title,
|
||||
parsedTitle: row.parsed_title,
|
||||
parsedBasename: row.parsed_basename,
|
||||
parsedSeason: row.parsed_season,
|
||||
parsedEpisode: row.parsed_episode,
|
||||
parserSource: row.parser_source,
|
||||
})),
|
||||
[
|
||||
{
|
||||
animeTitle: 'Little Witch Academia',
|
||||
parsedTitle: 'Little Witch Academia',
|
||||
parsedBasename: 'Little Witch Academia S02E05.mkv',
|
||||
parsedSeason: 2,
|
||||
parsedEpisode: 5,
|
||||
parserSource: 'fallback',
|
||||
},
|
||||
{
|
||||
animeTitle: 'Little Witch Academia',
|
||||
parsedTitle: 'Little Witch Academia',
|
||||
parsedBasename: 'Little Witch Academia S02E06.mkv',
|
||||
parsedSeason: 2,
|
||||
parsedEpisode: 6,
|
||||
parserSource: 'fallback',
|
||||
},
|
||||
],
|
||||
);
|
||||
assert.ok(
|
||||
littleWitchRows.every(
|
||||
(row) => typeof row.parser_confidence === 'number' && row.parser_confidence > 0,
|
||||
),
|
||||
);
|
||||
|
||||
const frierenRow = db
|
||||
.prepare(
|
||||
`
|
||||
SELECT
|
||||
a.canonical_title AS anime_title,
|
||||
v.parsed_title,
|
||||
v.parsed_episode,
|
||||
v.parser_source
|
||||
FROM imm_videos v
|
||||
JOIN imm_anime a ON a.anime_id = v.anime_id
|
||||
WHERE v.video_key = ?
|
||||
`,
|
||||
)
|
||||
.get('local:/library/[SubsPlease] Frieren - 03 - Departure.mkv') as {
|
||||
anime_title: string;
|
||||
parsed_title: string | null;
|
||||
parsed_episode: number | null;
|
||||
parser_source: string | null;
|
||||
} | null;
|
||||
|
||||
assert.ok(frierenRow);
|
||||
assert.equal(frierenRow?.anime_title, 'Frieren');
|
||||
assert.equal(frierenRow?.parsed_title, 'Frieren');
|
||||
assert.equal(frierenRow?.parsed_episode, 3);
|
||||
assert.equal(frierenRow?.parser_source, 'fallback');
|
||||
} finally {
|
||||
db.close();
|
||||
cleanupDbPath(dbPath);
|
||||
}
|
||||
});
|
||||
|
||||
test('ensureSchema adds subtitle-line occurrence tables to schema version 6 databases', () => {
|
||||
const dbPath = makeDbPath();
|
||||
const db = new Database(dbPath);
|
||||
|
||||
try {
|
||||
db.exec(`
|
||||
CREATE TABLE imm_schema_version (
|
||||
schema_version INTEGER PRIMARY KEY,
|
||||
applied_at_ms INTEGER NOT NULL
|
||||
);
|
||||
INSERT INTO imm_schema_version(schema_version, applied_at_ms) VALUES (6, 1);
|
||||
|
||||
CREATE TABLE imm_videos(
|
||||
video_id INTEGER PRIMARY KEY AUTOINCREMENT,
|
||||
video_key TEXT NOT NULL UNIQUE,
|
||||
anime_id INTEGER,
|
||||
canonical_title TEXT NOT NULL,
|
||||
source_type INTEGER NOT NULL,
|
||||
source_path TEXT,
|
||||
source_url TEXT,
|
||||
parsed_basename TEXT,
|
||||
parsed_title TEXT,
|
||||
parsed_season INTEGER,
|
||||
parsed_episode INTEGER,
|
||||
parser_source TEXT,
|
||||
parser_confidence REAL,
|
||||
parse_metadata_json TEXT,
|
||||
duration_ms INTEGER NOT NULL CHECK(duration_ms>=0),
|
||||
file_size_bytes INTEGER CHECK(file_size_bytes>=0),
|
||||
codec_id INTEGER, container_id INTEGER,
|
||||
width_px INTEGER, height_px INTEGER, fps_x100 INTEGER,
|
||||
bitrate_kbps INTEGER, audio_codec_id INTEGER,
|
||||
hash_sha256 TEXT, screenshot_path TEXT,
|
||||
metadata_json TEXT,
|
||||
CREATED_DATE INTEGER,
|
||||
LAST_UPDATE_DATE INTEGER
|
||||
);
|
||||
CREATE TABLE imm_sessions(
|
||||
session_id INTEGER PRIMARY KEY AUTOINCREMENT,
|
||||
session_uuid TEXT NOT NULL UNIQUE,
|
||||
video_id INTEGER NOT NULL,
|
||||
started_at_ms INTEGER NOT NULL,
|
||||
ended_at_ms INTEGER,
|
||||
status INTEGER NOT NULL,
|
||||
locale_id INTEGER,
|
||||
target_lang_id INTEGER,
|
||||
difficulty_tier INTEGER,
|
||||
subtitle_mode INTEGER,
|
||||
CREATED_DATE INTEGER,
|
||||
LAST_UPDATE_DATE INTEGER
|
||||
);
|
||||
CREATE TABLE imm_session_events(
|
||||
event_id INTEGER PRIMARY KEY AUTOINCREMENT,
|
||||
session_id INTEGER NOT NULL,
|
||||
ts_ms INTEGER NOT NULL,
|
||||
event_type INTEGER NOT NULL,
|
||||
line_index INTEGER,
|
||||
segment_start_ms INTEGER,
|
||||
segment_end_ms INTEGER,
|
||||
words_delta INTEGER NOT NULL DEFAULT 0,
|
||||
cards_delta INTEGER NOT NULL DEFAULT 0,
|
||||
payload_json TEXT,
|
||||
CREATED_DATE INTEGER,
|
||||
LAST_UPDATE_DATE INTEGER
|
||||
);
|
||||
CREATE TABLE imm_words(
|
||||
id INTEGER PRIMARY KEY AUTOINCREMENT,
|
||||
headword TEXT,
|
||||
word TEXT,
|
||||
reading TEXT,
|
||||
part_of_speech TEXT,
|
||||
pos1 TEXT,
|
||||
pos2 TEXT,
|
||||
pos3 TEXT,
|
||||
first_seen REAL,
|
||||
last_seen REAL,
|
||||
frequency INTEGER,
|
||||
UNIQUE(headword, word, reading)
|
||||
);
|
||||
CREATE TABLE imm_kanji(
|
||||
id INTEGER PRIMARY KEY AUTOINCREMENT,
|
||||
kanji TEXT,
|
||||
first_seen REAL,
|
||||
last_seen REAL,
|
||||
frequency INTEGER,
|
||||
UNIQUE(kanji)
|
||||
);
|
||||
CREATE TABLE imm_rollup_state(
|
||||
state_key TEXT PRIMARY KEY,
|
||||
state_value INTEGER NOT NULL
|
||||
);
|
||||
`);
|
||||
|
||||
ensureSchema(db);
|
||||
|
||||
const tableNames = new Set(
|
||||
(
|
||||
db
|
||||
.prepare(`SELECT name FROM sqlite_master WHERE type = 'table' AND name LIKE 'imm_%'`)
|
||||
.all() as Array<{ name: string }>
|
||||
).map((row) => row.name),
|
||||
);
|
||||
|
||||
assert.ok(tableNames.has('imm_subtitle_lines'));
|
||||
assert.ok(tableNames.has('imm_word_line_occurrences'));
|
||||
assert.ok(tableNames.has('imm_kanji_line_occurrences'));
|
||||
} finally {
|
||||
db.close();
|
||||
cleanupDbPath(dbPath);
|
||||
}
|
||||
});
|
||||
|
||||
test('ensureSchema migrates legacy cover art blobs into the shared blob store', () => {
|
||||
const dbPath = makeDbPath();
|
||||
const db = new Database(dbPath);
|
||||
|
||||
try {
|
||||
ensureSchema(db);
|
||||
db.prepare('UPDATE imm_schema_version SET schema_version = 12').run();
|
||||
|
||||
const videoId = getOrCreateVideoRecord(db, 'local:/tmp/legacy-cover-art.mkv', {
|
||||
canonicalTitle: 'Legacy Cover Art',
|
||||
sourcePath: '/tmp/legacy-cover-art.mkv',
|
||||
sourceUrl: null,
|
||||
sourceType: SOURCE_TYPE_LOCAL,
|
||||
});
|
||||
const legacyBlob = Uint8Array.from([0xde, 0xad, 0xbe, 0xef]);
|
||||
|
||||
db.prepare(
|
||||
`
|
||||
INSERT INTO imm_media_art (
|
||||
video_id,
|
||||
anilist_id,
|
||||
cover_url,
|
||||
cover_blob,
|
||||
cover_blob_hash,
|
||||
title_romaji,
|
||||
title_english,
|
||||
episodes_total,
|
||||
fetched_at_ms,
|
||||
CREATED_DATE,
|
||||
LAST_UPDATE_DATE
|
||||
) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
|
||||
`,
|
||||
).run(videoId, null, null, legacyBlob, null, null, null, null, 1, 1, 1);
|
||||
|
||||
assert.doesNotThrow(() => ensureSchema(db));
|
||||
|
||||
const mediaArtRow = db
|
||||
.prepare(
|
||||
'SELECT cover_blob AS coverBlob, cover_blob_hash AS coverBlobHash FROM imm_media_art',
|
||||
)
|
||||
.get() as {
|
||||
coverBlob: ArrayBuffer | Uint8Array | Buffer | null;
|
||||
coverBlobHash: string | null;
|
||||
} | null;
|
||||
|
||||
assert.ok(mediaArtRow);
|
||||
assert.ok(mediaArtRow?.coverBlobHash);
|
||||
assert.equal(
|
||||
parseCoverBlobReference(normalizeCoverBlobBytes(mediaArtRow?.coverBlob)),
|
||||
mediaArtRow?.coverBlobHash,
|
||||
);
|
||||
|
||||
const sharedBlobRow = db
|
||||
.prepare('SELECT cover_blob AS coverBlob FROM imm_cover_art_blobs WHERE blob_hash = ?')
|
||||
.get(mediaArtRow?.coverBlobHash) as {
|
||||
coverBlob: ArrayBuffer | Uint8Array | Buffer;
|
||||
} | null;
|
||||
|
||||
assert.ok(sharedBlobRow);
|
||||
assert.equal(normalizeCoverBlobBytes(sharedBlobRow?.coverBlob)?.toString('hex'), 'deadbeef');
|
||||
} finally {
|
||||
db.close();
|
||||
cleanupDbPath(dbPath);
|
||||
}
|
||||
});
|
||||
|
||||
test('anime rows are reused by normalized parsed title and upgraded with AniList metadata', () => {
|
||||
const dbPath = makeDbPath();
|
||||
const db = new Database(dbPath);
|
||||
|
||||
try {
|
||||
ensureSchema(db);
|
||||
|
||||
const firstVideoId = getOrCreateVideoRecord(db, 'local:/tmp/lwa-s02e05.mkv', {
|
||||
canonicalTitle: 'Episode 5',
|
||||
sourcePath: '/tmp/Little Witch Academia S02E05.mkv',
|
||||
sourceUrl: null,
|
||||
sourceType: SOURCE_TYPE_LOCAL,
|
||||
});
|
||||
const secondVideoId = getOrCreateVideoRecord(db, 'local:/tmp/lwa-s02e06.mkv', {
|
||||
canonicalTitle: 'Episode 6',
|
||||
sourcePath: '/tmp/Little Witch Academia S02E06.mkv',
|
||||
sourceUrl: null,
|
||||
sourceType: SOURCE_TYPE_LOCAL,
|
||||
});
|
||||
|
||||
const provisionalAnimeId = getOrCreateAnimeRecord(db, {
|
||||
parsedTitle: 'Little Witch Academia',
|
||||
canonicalTitle: 'Little Witch Academia',
|
||||
anilistId: null,
|
||||
titleRomaji: null,
|
||||
titleEnglish: null,
|
||||
titleNative: null,
|
||||
metadataJson: '{"source":"parsed"}',
|
||||
});
|
||||
linkVideoToAnimeRecord(db, firstVideoId, {
|
||||
animeId: provisionalAnimeId,
|
||||
parsedBasename: 'Little Witch Academia S02E05.mkv',
|
||||
parsedTitle: 'Little Witch Academia',
|
||||
parsedSeason: 2,
|
||||
parsedEpisode: 5,
|
||||
parserSource: 'fallback',
|
||||
parserConfidence: 0.6,
|
||||
parseMetadataJson: '{"source":"parsed","episode":5}',
|
||||
});
|
||||
|
||||
const reusedAnimeId = getOrCreateAnimeRecord(db, {
|
||||
parsedTitle: ' little witch academia ',
|
||||
canonicalTitle: 'Little Witch Academia',
|
||||
anilistId: null,
|
||||
titleRomaji: null,
|
||||
titleEnglish: null,
|
||||
titleNative: null,
|
||||
metadataJson: '{"source":"parsed"}',
|
||||
});
|
||||
linkVideoToAnimeRecord(db, secondVideoId, {
|
||||
animeId: reusedAnimeId,
|
||||
parsedBasename: 'Little Witch Academia S02E06.mkv',
|
||||
parsedTitle: 'Little Witch Academia',
|
||||
parsedSeason: 2,
|
||||
parsedEpisode: 6,
|
||||
parserSource: 'fallback',
|
||||
parserConfidence: 0.6,
|
||||
parseMetadataJson: '{"source":"parsed","episode":6}',
|
||||
});
|
||||
|
||||
assert.equal(reusedAnimeId, provisionalAnimeId);
|
||||
|
||||
const upgradedAnimeId = getOrCreateAnimeRecord(db, {
|
||||
parsedTitle: 'Little Witch Academia',
|
||||
canonicalTitle: 'Little Witch Academia TV',
|
||||
anilistId: 33_435,
|
||||
titleRomaji: 'Little Witch Academia',
|
||||
titleEnglish: 'Little Witch Academia',
|
||||
titleNative: 'リトルウィッチアカデミア',
|
||||
metadataJson: '{"source":"anilist"}',
|
||||
});
|
||||
|
||||
assert.equal(upgradedAnimeId, provisionalAnimeId);
|
||||
|
||||
const animeRows = db.prepare('SELECT * FROM imm_anime').all() as Array<{
|
||||
anime_id: number;
|
||||
normalized_title_key: string;
|
||||
canonical_title: string;
|
||||
anilist_id: number | null;
|
||||
title_romaji: string | null;
|
||||
title_english: string | null;
|
||||
title_native: string | null;
|
||||
metadata_json: string | null;
|
||||
}>;
|
||||
assert.equal(animeRows.length, 1);
|
||||
assert.equal(animeRows[0]?.anime_id, provisionalAnimeId);
|
||||
assert.equal(animeRows[0]?.normalized_title_key, 'little witch academia');
|
||||
assert.equal(animeRows[0]?.canonical_title, 'Little Witch Academia TV');
|
||||
assert.equal(animeRows[0]?.anilist_id, 33_435);
|
||||
assert.equal(animeRows[0]?.title_romaji, 'Little Witch Academia');
|
||||
assert.equal(animeRows[0]?.title_english, 'Little Witch Academia');
|
||||
assert.equal(animeRows[0]?.title_native, 'リトルウィッチアカデミア');
|
||||
assert.equal(animeRows[0]?.metadata_json, '{"source":"anilist"}');
|
||||
|
||||
const linkedVideos = db
|
||||
.prepare(
|
||||
`
|
||||
SELECT anime_id, parsed_title, parsed_season, parsed_episode
|
||||
FROM imm_videos
|
||||
WHERE video_id IN (?, ?)
|
||||
ORDER BY video_id
|
||||
`,
|
||||
)
|
||||
.all(firstVideoId, secondVideoId) as Array<{
|
||||
anime_id: number | null;
|
||||
parsed_title: string | null;
|
||||
parsed_season: number | null;
|
||||
parsed_episode: number | null;
|
||||
}>;
|
||||
|
||||
assert.deepEqual(linkedVideos, [
|
||||
{
|
||||
anime_id: provisionalAnimeId,
|
||||
parsed_title: 'Little Witch Academia',
|
||||
parsed_season: 2,
|
||||
parsed_episode: 5,
|
||||
},
|
||||
{
|
||||
anime_id: provisionalAnimeId,
|
||||
parsed_title: 'Little Witch Academia',
|
||||
parsed_season: 2,
|
||||
parsed_episode: 6,
|
||||
},
|
||||
]);
|
||||
} finally {
|
||||
db.close();
|
||||
cleanupDbPath(dbPath);
|
||||
}
|
||||
});
|
||||
|
||||
test('start/finalize session updates ended_at and status', () => {
|
||||
const dbPath = makeDbPath();
|
||||
const db = new Database(dbPath);
|
||||
@@ -116,6 +740,39 @@ test('start/finalize session updates ended_at and status', () => {
|
||||
}
|
||||
});
|
||||
|
||||
test('finalize session persists ended media position', () => {
|
||||
const dbPath = makeDbPath();
|
||||
const db = new Database(dbPath);
|
||||
|
||||
try {
|
||||
ensureSchema(db);
|
||||
const videoId = getOrCreateVideoRecord(db, 'local:/tmp/slice-a-ended-media.mkv', {
|
||||
canonicalTitle: 'Slice A Ended Media',
|
||||
sourcePath: '/tmp/slice-a-ended-media.mkv',
|
||||
sourceUrl: null,
|
||||
sourceType: SOURCE_TYPE_LOCAL,
|
||||
});
|
||||
const startedAtMs = 1_234_567_000;
|
||||
const endedAtMs = startedAtMs + 8_500;
|
||||
const { sessionId, state } = startSessionRecord(db, videoId, startedAtMs);
|
||||
state.lastMediaMs = 91_000;
|
||||
|
||||
finalizeSessionRecord(db, state, endedAtMs);
|
||||
|
||||
const row = db
|
||||
.prepare('SELECT ended_media_ms FROM imm_sessions WHERE session_id = ?')
|
||||
.get(sessionId) as {
|
||||
ended_media_ms: number | null;
|
||||
} | null;
|
||||
|
||||
assert.ok(row);
|
||||
assert.equal(row?.ended_media_ms, 91_000);
|
||||
} finally {
|
||||
db.close();
|
||||
cleanupDbPath(dbPath);
|
||||
}
|
||||
});
|
||||
|
||||
test('executeQueuedWrite inserts event and telemetry rows', () => {
|
||||
const dbPath = makeDbPath();
|
||||
const db = new Database(dbPath);
|
||||
@@ -139,11 +796,11 @@ test('executeQueuedWrite inserts event and telemetry rows', () => {
|
||||
totalWatchedMs: 1_000,
|
||||
activeWatchedMs: 900,
|
||||
linesSeen: 3,
|
||||
wordsSeen: 6,
|
||||
tokensSeen: 6,
|
||||
cardsMined: 1,
|
||||
lookupCount: 2,
|
||||
lookupHits: 1,
|
||||
yomitanLookupCount: 0,
|
||||
pauseCount: 1,
|
||||
pauseMs: 50,
|
||||
seekForwardCount: 0,
|
||||
@@ -161,7 +818,7 @@ test('executeQueuedWrite inserts event and telemetry rows', () => {
|
||||
lineIndex: 1,
|
||||
segmentStartMs: 0,
|
||||
segmentEndMs: 800,
|
||||
wordsDelta: 2,
|
||||
tokensDelta: 2,
|
||||
cardsDelta: 0,
|
||||
payloadJson: '{"event":"subtitle-line"}',
|
||||
},
|
||||
@@ -191,18 +848,22 @@ test('executeQueuedWrite inserts and upserts word and kanji rows', () => {
|
||||
ensureSchema(db);
|
||||
const stmts = createTrackerPreparedStatements(db);
|
||||
|
||||
stmts.wordUpsertStmt.run('猫', '猫', '', 10.0, 10.0);
|
||||
stmts.wordUpsertStmt.run('猫', '猫', '', 5.0, 15.0);
|
||||
stmts.wordUpsertStmt.run('猫', '猫', '', 'noun', '名詞', '一般', '', 10.0, 10.0);
|
||||
stmts.wordUpsertStmt.run('猫', '猫', '', 'noun', '名詞', '一般', '', 5.0, 15.0);
|
||||
stmts.kanjiUpsertStmt.run('日', 9.0, 9.0);
|
||||
stmts.kanjiUpsertStmt.run('日', 8.0, 11.0);
|
||||
|
||||
const wordRow = db
|
||||
.prepare(
|
||||
'SELECT headword, frequency, first_seen, last_seen FROM imm_words WHERE headword = ?',
|
||||
`SELECT headword, frequency, part_of_speech, pos1, pos2, first_seen, last_seen
|
||||
FROM imm_words WHERE headword = ?`,
|
||||
)
|
||||
.get('猫') as {
|
||||
headword: string;
|
||||
frequency: number;
|
||||
part_of_speech: string;
|
||||
pos1: string;
|
||||
pos2: string;
|
||||
first_seen: number;
|
||||
last_seen: number;
|
||||
} | null;
|
||||
@@ -218,6 +879,9 @@ test('executeQueuedWrite inserts and upserts word and kanji rows', () => {
|
||||
assert.ok(wordRow);
|
||||
assert.ok(kanjiRow);
|
||||
assert.equal(wordRow?.frequency, 2);
|
||||
assert.equal(wordRow?.part_of_speech, 'noun');
|
||||
assert.equal(wordRow?.pos1, '名詞');
|
||||
assert.equal(wordRow?.pos2, '一般');
|
||||
assert.equal(kanjiRow?.frequency, 2);
|
||||
assert.equal(wordRow?.first_seen, 5);
|
||||
assert.equal(wordRow?.last_seen, 15);
|
||||
@@ -228,3 +892,54 @@ test('executeQueuedWrite inserts and upserts word and kanji rows', () => {
|
||||
cleanupDbPath(dbPath);
|
||||
}
|
||||
});
|
||||
|
||||
test('word upsert replaces legacy other part_of_speech when better POS metadata arrives later', () => {
|
||||
const dbPath = makeDbPath();
|
||||
const db = new Database(dbPath);
|
||||
|
||||
try {
|
||||
ensureSchema(db);
|
||||
const stmts = createTrackerPreparedStatements(db);
|
||||
|
||||
stmts.wordUpsertStmt.run(
|
||||
'知っている',
|
||||
'知っている',
|
||||
'しっている',
|
||||
'other',
|
||||
'動詞',
|
||||
'自立',
|
||||
'',
|
||||
10,
|
||||
10,
|
||||
);
|
||||
stmts.wordUpsertStmt.run(
|
||||
'知っている',
|
||||
'知っている',
|
||||
'しっている',
|
||||
'verb',
|
||||
'動詞',
|
||||
'自立',
|
||||
'',
|
||||
11,
|
||||
12,
|
||||
);
|
||||
|
||||
const row = db
|
||||
.prepare('SELECT frequency, part_of_speech, pos1, pos2 FROM imm_words WHERE headword = ?')
|
||||
.get('知っている') as {
|
||||
frequency: number;
|
||||
part_of_speech: string;
|
||||
pos1: string;
|
||||
pos2: string;
|
||||
} | null;
|
||||
|
||||
assert.ok(row);
|
||||
assert.equal(row?.frequency, 2);
|
||||
assert.equal(row?.part_of_speech, 'verb');
|
||||
assert.equal(row?.pos1, '動詞');
|
||||
assert.equal(row?.pos2, '自立');
|
||||
} finally {
|
||||
db.close();
|
||||
cleanupDbPath(dbPath);
|
||||
}
|
||||
});
|
||||
|
||||
File diff suppressed because it is too large
Load Diff
@@ -1,4 +1,4 @@
|
||||
export const SCHEMA_VERSION = 3;
|
||||
export const SCHEMA_VERSION = 15;
|
||||
export const DEFAULT_QUEUE_CAP = 1_000;
|
||||
export const DEFAULT_BATCH_SIZE = 25;
|
||||
export const DEFAULT_FLUSH_INTERVAL_MS = 500;
|
||||
@@ -7,6 +7,7 @@ const ONE_WEEK_MS = 7 * 24 * 60 * 60 * 1000;
|
||||
export const DEFAULT_EVENTS_RETENTION_MS = ONE_WEEK_MS;
|
||||
export const DEFAULT_VACUUM_INTERVAL_MS = ONE_WEEK_MS;
|
||||
export const DEFAULT_TELEMETRY_RETENTION_MS = 30 * 24 * 60 * 60 * 1000;
|
||||
export const DEFAULT_SESSIONS_RETENTION_MS = 30 * 24 * 60 * 60 * 1000;
|
||||
export const DEFAULT_DAILY_ROLLUP_RETENTION_MS = 365 * 24 * 60 * 60 * 1000;
|
||||
export const DEFAULT_MONTHLY_ROLLUP_RETENTION_MS = 5 * 365 * 24 * 60 * 60 * 1000;
|
||||
export const DEFAULT_MAX_PAYLOAD_BYTES = 256;
|
||||
@@ -25,10 +26,14 @@ export const EVENT_SEEK_FORWARD = 5;
|
||||
export const EVENT_SEEK_BACKWARD = 6;
|
||||
export const EVENT_PAUSE_START = 7;
|
||||
export const EVENT_PAUSE_END = 8;
|
||||
export const EVENT_YOMITAN_LOOKUP = 9;
|
||||
|
||||
export interface ImmersionTrackerOptions {
|
||||
dbPath: string;
|
||||
policy?: ImmersionTrackerPolicy;
|
||||
resolveLegacyVocabularyPos?: (
|
||||
row: LegacyVocabularyPosRow,
|
||||
) => Promise<LegacyVocabularyPosResolution | null>;
|
||||
}
|
||||
|
||||
export interface ImmersionTrackerPolicy {
|
||||
@@ -40,6 +45,7 @@ export interface ImmersionTrackerPolicy {
|
||||
retention?: {
|
||||
eventsDays?: number;
|
||||
telemetryDays?: number;
|
||||
sessionsDays?: number;
|
||||
dailyRollupsDays?: number;
|
||||
monthlyRollupsDays?: number;
|
||||
vacuumIntervalDays?: number;
|
||||
@@ -50,11 +56,11 @@ export interface TelemetryAccumulator {
|
||||
totalWatchedMs: number;
|
||||
activeWatchedMs: number;
|
||||
linesSeen: number;
|
||||
wordsSeen: number;
|
||||
tokensSeen: number;
|
||||
cardsMined: number;
|
||||
lookupCount: number;
|
||||
lookupHits: number;
|
||||
yomitanLookupCount: number;
|
||||
pauseCount: number;
|
||||
pauseMs: number;
|
||||
seekForwardCount: number;
|
||||
@@ -72,20 +78,22 @@ export interface SessionState extends TelemetryAccumulator {
|
||||
lastPauseStartMs: number | null;
|
||||
isPaused: boolean;
|
||||
pendingTelemetry: boolean;
|
||||
markedWatched: boolean;
|
||||
}
|
||||
|
||||
interface QueuedTelemetryWrite {
|
||||
kind: 'telemetry';
|
||||
sessionId: number;
|
||||
sampleMs?: number;
|
||||
lastMediaMs?: number | null;
|
||||
totalWatchedMs?: number;
|
||||
activeWatchedMs?: number;
|
||||
linesSeen?: number;
|
||||
wordsSeen?: number;
|
||||
tokensSeen?: number;
|
||||
cardsMined?: number;
|
||||
lookupCount?: number;
|
||||
lookupHits?: number;
|
||||
yomitanLookupCount?: number;
|
||||
pauseCount?: number;
|
||||
pauseMs?: number;
|
||||
seekForwardCount?: number;
|
||||
@@ -95,7 +103,7 @@ interface QueuedTelemetryWrite {
|
||||
lineIndex?: number | null;
|
||||
segmentStartMs?: number | null;
|
||||
segmentEndMs?: number | null;
|
||||
wordsDelta?: number;
|
||||
tokensDelta?: number;
|
||||
cardsDelta?: number;
|
||||
payloadJson?: string | null;
|
||||
}
|
||||
@@ -108,7 +116,7 @@ interface QueuedEventWrite {
|
||||
lineIndex?: number | null;
|
||||
segmentStartMs?: number | null;
|
||||
segmentEndMs?: number | null;
|
||||
wordsDelta?: number;
|
||||
tokensDelta?: number;
|
||||
cardsDelta?: number;
|
||||
payloadJson?: string | null;
|
||||
}
|
||||
@@ -118,8 +126,13 @@ interface QueuedWordWrite {
|
||||
headword: string;
|
||||
word: string;
|
||||
reading: string;
|
||||
partOfSpeech: string;
|
||||
pos1: string;
|
||||
pos2: string;
|
||||
pos3: string;
|
||||
firstSeen: number;
|
||||
lastSeen: number;
|
||||
frequencyRank: number | null;
|
||||
}
|
||||
|
||||
interface QueuedKanjiWrite {
|
||||
@@ -129,11 +142,44 @@ interface QueuedKanjiWrite {
|
||||
lastSeen: number;
|
||||
}
|
||||
|
||||
export interface CountedWordOccurrence {
|
||||
headword: string;
|
||||
word: string;
|
||||
reading: string;
|
||||
partOfSpeech: string;
|
||||
pos1: string;
|
||||
pos2: string;
|
||||
pos3: string;
|
||||
occurrenceCount: number;
|
||||
frequencyRank: number | null;
|
||||
}
|
||||
|
||||
export interface CountedKanjiOccurrence {
|
||||
kanji: string;
|
||||
occurrenceCount: number;
|
||||
}
|
||||
|
||||
interface QueuedSubtitleLineWrite {
|
||||
kind: 'subtitleLine';
|
||||
sessionId: number;
|
||||
videoId: number;
|
||||
lineIndex: number;
|
||||
segmentStartMs: number | null;
|
||||
segmentEndMs: number | null;
|
||||
text: string;
|
||||
secondaryText?: string | null;
|
||||
wordOccurrences: CountedWordOccurrence[];
|
||||
kanjiOccurrences: CountedKanjiOccurrence[];
|
||||
firstSeen: number;
|
||||
lastSeen: number;
|
||||
}
|
||||
|
||||
export type QueuedWrite =
|
||||
| QueuedTelemetryWrite
|
||||
| QueuedEventWrite
|
||||
| QueuedWordWrite
|
||||
| QueuedKanjiWrite;
|
||||
| QueuedKanjiWrite
|
||||
| QueuedSubtitleLineWrite;
|
||||
|
||||
export interface VideoMetadata {
|
||||
sourceType: number;
|
||||
@@ -152,18 +198,173 @@ export interface VideoMetadata {
|
||||
metadataJson: string | null;
|
||||
}
|
||||
|
||||
export interface ParsedAnimeVideoMetadata {
|
||||
animeId: number | null;
|
||||
parsedBasename: string | null;
|
||||
parsedTitle: string | null;
|
||||
parsedSeason: number | null;
|
||||
parsedEpisode: number | null;
|
||||
parserSource: string | null;
|
||||
parserConfidence: number | null;
|
||||
parseMetadataJson: string | null;
|
||||
}
|
||||
|
||||
export interface ParsedAnimeVideoGuess {
|
||||
parsedBasename: string | null;
|
||||
parsedTitle: string;
|
||||
parsedSeason: number | null;
|
||||
parsedEpisode: number | null;
|
||||
parserSource: 'guessit' | 'fallback';
|
||||
parserConfidence: number;
|
||||
parseMetadataJson: string;
|
||||
}
|
||||
|
||||
export interface SessionSummaryQueryRow {
|
||||
sessionId: number;
|
||||
videoId: number | null;
|
||||
canonicalTitle: string | null;
|
||||
animeId: number | null;
|
||||
animeTitle: string | null;
|
||||
startedAtMs: number;
|
||||
endedAtMs: number | null;
|
||||
totalWatchedMs: number;
|
||||
activeWatchedMs: number;
|
||||
linesSeen: number;
|
||||
wordsSeen: number;
|
||||
tokensSeen: number;
|
||||
cardsMined: number;
|
||||
lookupCount: number;
|
||||
lookupHits: number;
|
||||
yomitanLookupCount: number;
|
||||
knownWordsSeen?: number;
|
||||
knownWordRate?: number;
|
||||
}
|
||||
|
||||
export interface LifetimeGlobalRow {
|
||||
totalSessions: number;
|
||||
totalActiveMs: number;
|
||||
totalCards: number;
|
||||
activeDays: number;
|
||||
episodesStarted: number;
|
||||
episodesCompleted: number;
|
||||
animeCompleted: number;
|
||||
lastRebuiltMs: number | null;
|
||||
}
|
||||
|
||||
export interface LifetimeAnimeRow {
|
||||
animeId: number;
|
||||
totalSessions: number;
|
||||
totalActiveMs: number;
|
||||
totalCards: number;
|
||||
totalLinesSeen: number;
|
||||
totalTokensSeen: number;
|
||||
episodesStarted: number;
|
||||
episodesCompleted: number;
|
||||
firstWatchedMs: number | null;
|
||||
lastWatchedMs: number | null;
|
||||
}
|
||||
|
||||
export interface LifetimeMediaRow {
|
||||
videoId: number;
|
||||
totalSessions: number;
|
||||
totalActiveMs: number;
|
||||
totalCards: number;
|
||||
totalLinesSeen: number;
|
||||
totalTokensSeen: number;
|
||||
completed: number;
|
||||
firstWatchedMs: number | null;
|
||||
lastWatchedMs: number | null;
|
||||
}
|
||||
|
||||
export interface AppliedSessionRow {
|
||||
sessionId: number;
|
||||
appliedAtMs: number;
|
||||
}
|
||||
|
||||
export interface LifetimeRebuildSummary {
|
||||
appliedSessions: number;
|
||||
rebuiltAtMs: number;
|
||||
}
|
||||
|
||||
export interface VocabularyStatsRow {
|
||||
wordId: number;
|
||||
headword: string;
|
||||
word: string;
|
||||
reading: string;
|
||||
partOfSpeech: string | null;
|
||||
pos1: string | null;
|
||||
pos2: string | null;
|
||||
pos3: string | null;
|
||||
frequency: number;
|
||||
frequencyRank: number | null;
|
||||
animeCount: number;
|
||||
firstSeen: number;
|
||||
lastSeen: number;
|
||||
}
|
||||
|
||||
export interface VocabularyCleanupSummary {
|
||||
scanned: number;
|
||||
kept: number;
|
||||
deleted: number;
|
||||
repaired: number;
|
||||
}
|
||||
|
||||
export interface LegacyVocabularyPosRow {
|
||||
headword: string;
|
||||
word: string;
|
||||
reading: string | null;
|
||||
}
|
||||
|
||||
export interface LegacyVocabularyPosResolution {
|
||||
headword: string;
|
||||
reading: string;
|
||||
partOfSpeech: string;
|
||||
pos1: string;
|
||||
pos2: string;
|
||||
pos3: string;
|
||||
}
|
||||
|
||||
export interface KanjiStatsRow {
|
||||
kanjiId: number;
|
||||
kanji: string;
|
||||
frequency: number;
|
||||
firstSeen: number;
|
||||
lastSeen: number;
|
||||
}
|
||||
|
||||
export interface WordOccurrenceRow {
|
||||
animeId: number | null;
|
||||
animeTitle: string | null;
|
||||
videoId: number;
|
||||
videoTitle: string;
|
||||
sourcePath: string | null;
|
||||
secondaryText: string | null;
|
||||
sessionId: number;
|
||||
lineIndex: number;
|
||||
segmentStartMs: number | null;
|
||||
segmentEndMs: number | null;
|
||||
text: string;
|
||||
occurrenceCount: number;
|
||||
}
|
||||
|
||||
export interface KanjiOccurrenceRow {
|
||||
animeId: number | null;
|
||||
animeTitle: string | null;
|
||||
videoId: number;
|
||||
videoTitle: string;
|
||||
sourcePath: string | null;
|
||||
secondaryText: string | null;
|
||||
sessionId: number;
|
||||
lineIndex: number;
|
||||
segmentStartMs: number | null;
|
||||
segmentEndMs: number | null;
|
||||
text: string;
|
||||
occurrenceCount: number;
|
||||
}
|
||||
|
||||
export interface SessionEventRow {
|
||||
eventType: number;
|
||||
tsMs: number;
|
||||
payload: string | null;
|
||||
}
|
||||
|
||||
export interface SessionTimelineRow {
|
||||
@@ -171,7 +372,6 @@ export interface SessionTimelineRow {
|
||||
totalWatchedMs: number;
|
||||
activeWatchedMs: number;
|
||||
linesSeen: number;
|
||||
wordsSeen: number;
|
||||
tokensSeen: number;
|
||||
cardsMined: number;
|
||||
}
|
||||
@@ -182,11 +382,10 @@ export interface ImmersionSessionRollupRow {
|
||||
totalSessions: number;
|
||||
totalActiveMin: number;
|
||||
totalLinesSeen: number;
|
||||
totalWordsSeen: number;
|
||||
totalTokensSeen: number;
|
||||
totalCards: number;
|
||||
cardsPerHour: number | null;
|
||||
wordsPerMin: number | null;
|
||||
tokensPerMin: number | null;
|
||||
lookupHitRate: number | null;
|
||||
}
|
||||
|
||||
@@ -200,3 +399,186 @@ export interface ProbeMetadata {
|
||||
bitrateKbps: number | null;
|
||||
audioCodecId: number | null;
|
||||
}
|
||||
|
||||
export interface MediaArtRow {
|
||||
videoId: number;
|
||||
anilistId: number | null;
|
||||
coverUrl: string | null;
|
||||
coverBlob: Buffer | null;
|
||||
titleRomaji: string | null;
|
||||
titleEnglish: string | null;
|
||||
episodesTotal: number | null;
|
||||
fetchedAtMs: number;
|
||||
}
|
||||
|
||||
export interface MediaLibraryRow {
|
||||
videoId: number;
|
||||
canonicalTitle: string;
|
||||
totalSessions: number;
|
||||
totalActiveMs: number;
|
||||
totalCards: number;
|
||||
totalTokensSeen: number;
|
||||
lastWatchedMs: number;
|
||||
hasCoverArt: number;
|
||||
}
|
||||
|
||||
export interface MediaDetailRow {
|
||||
videoId: number;
|
||||
canonicalTitle: string;
|
||||
animeId: number | null;
|
||||
totalSessions: number;
|
||||
totalActiveMs: number;
|
||||
totalCards: number;
|
||||
totalTokensSeen: number;
|
||||
totalLinesSeen: number;
|
||||
totalLookupCount: number;
|
||||
totalLookupHits: number;
|
||||
totalYomitanLookupCount: number;
|
||||
}
|
||||
|
||||
export interface AnimeLibraryRow {
|
||||
animeId: number;
|
||||
canonicalTitle: string;
|
||||
anilistId: number | null;
|
||||
totalSessions: number;
|
||||
totalActiveMs: number;
|
||||
totalCards: number;
|
||||
totalTokensSeen: number;
|
||||
episodeCount: number;
|
||||
episodesTotal: number | null;
|
||||
lastWatchedMs: number;
|
||||
}
|
||||
|
||||
export interface AnimeDetailRow {
|
||||
animeId: number;
|
||||
canonicalTitle: string;
|
||||
anilistId: number | null;
|
||||
titleRomaji: string | null;
|
||||
titleEnglish: string | null;
|
||||
titleNative: string | null;
|
||||
description: string | null;
|
||||
totalSessions: number;
|
||||
totalActiveMs: number;
|
||||
totalCards: number;
|
||||
totalTokensSeen: number;
|
||||
totalLinesSeen: number;
|
||||
totalLookupCount: number;
|
||||
totalLookupHits: number;
|
||||
totalYomitanLookupCount: number;
|
||||
episodeCount: number;
|
||||
lastWatchedMs: number;
|
||||
}
|
||||
|
||||
export interface AnimeAnilistEntryRow {
|
||||
anilistId: number;
|
||||
titleRomaji: string | null;
|
||||
titleEnglish: string | null;
|
||||
season: number | null;
|
||||
}
|
||||
|
||||
export interface AnimeEpisodeRow {
|
||||
animeId: number;
|
||||
videoId: number;
|
||||
canonicalTitle: string;
|
||||
parsedTitle: string | null;
|
||||
season: number | null;
|
||||
episode: number | null;
|
||||
durationMs: number;
|
||||
endedMediaMs: number | null;
|
||||
watched: number;
|
||||
totalSessions: number;
|
||||
totalActiveMs: number;
|
||||
totalCards: number;
|
||||
totalTokensSeen: number;
|
||||
totalYomitanLookupCount: number;
|
||||
lastWatchedMs: number;
|
||||
}
|
||||
|
||||
export interface StreakCalendarRow {
|
||||
epochDay: number;
|
||||
totalActiveMin: number;
|
||||
}
|
||||
|
||||
export interface AnimeWordRow {
|
||||
wordId: number;
|
||||
headword: string;
|
||||
word: string;
|
||||
reading: string;
|
||||
partOfSpeech: string | null;
|
||||
frequency: number;
|
||||
}
|
||||
|
||||
export interface EpisodesPerDayRow {
|
||||
epochDay: number;
|
||||
episodeCount: number;
|
||||
}
|
||||
|
||||
export interface NewAnimePerDayRow {
|
||||
epochDay: number;
|
||||
newAnimeCount: number;
|
||||
}
|
||||
|
||||
export interface WatchTimePerAnimeRow {
|
||||
epochDay: number;
|
||||
animeId: number;
|
||||
animeTitle: string;
|
||||
totalActiveMin: number;
|
||||
}
|
||||
|
||||
export interface WordDetailRow {
|
||||
wordId: number;
|
||||
headword: string;
|
||||
word: string;
|
||||
reading: string;
|
||||
partOfSpeech: string | null;
|
||||
pos1: string | null;
|
||||
pos2: string | null;
|
||||
pos3: string | null;
|
||||
frequency: number;
|
||||
firstSeen: number;
|
||||
lastSeen: number;
|
||||
}
|
||||
|
||||
export interface WordAnimeAppearanceRow {
|
||||
animeId: number;
|
||||
animeTitle: string;
|
||||
occurrenceCount: number;
|
||||
}
|
||||
|
||||
export interface SimilarWordRow {
|
||||
wordId: number;
|
||||
headword: string;
|
||||
word: string;
|
||||
reading: string;
|
||||
frequency: number;
|
||||
}
|
||||
|
||||
export interface KanjiDetailRow {
|
||||
kanjiId: number;
|
||||
kanji: string;
|
||||
frequency: number;
|
||||
firstSeen: number;
|
||||
lastSeen: number;
|
||||
}
|
||||
|
||||
export interface KanjiAnimeAppearanceRow {
|
||||
animeId: number;
|
||||
animeTitle: string;
|
||||
occurrenceCount: number;
|
||||
}
|
||||
|
||||
export interface KanjiWordRow {
|
||||
wordId: number;
|
||||
headword: string;
|
||||
word: string;
|
||||
reading: string;
|
||||
frequency: number;
|
||||
}
|
||||
|
||||
export interface EpisodeCardEventRow {
|
||||
eventId: number;
|
||||
sessionId: number;
|
||||
tsMs: number;
|
||||
cardsDelta: number;
|
||||
noteIds: number[];
|
||||
}
|
||||
|
||||
@@ -29,7 +29,10 @@ export {
|
||||
} from './startup';
|
||||
export { openYomitanSettingsWindow } from './yomitan-settings';
|
||||
export { createTokenizerDepsRuntime, tokenizeSubtitle } from './tokenizer';
|
||||
export { clearYomitanParserCachesForWindow } from './tokenizer/yomitan-parser-runtime';
|
||||
export {
|
||||
addYomitanNoteViaSearch,
|
||||
clearYomitanParserCachesForWindow,
|
||||
} from './tokenizer/yomitan-parser-runtime';
|
||||
export {
|
||||
deleteYomitanDictionaryByTitle,
|
||||
getYomitanDictionaryInfo,
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
import test from 'node:test';
|
||||
import assert from 'node:assert/strict';
|
||||
|
||||
import { createIpcDepsRuntime, registerIpcHandlers } from './ipc';
|
||||
import { createIpcDepsRuntime, registerIpcHandlers, type IpcServiceDeps } from './ipc';
|
||||
import { IPC_CHANNELS } from '../../shared/ipc/contracts';
|
||||
|
||||
interface FakeIpcRegistrar {
|
||||
@@ -77,6 +77,90 @@ function createControllerConfigFixture() {
|
||||
};
|
||||
}
|
||||
|
||||
function createRegisterIpcDeps(overrides: Partial<IpcServiceDeps> = {}): IpcServiceDeps {
|
||||
return {
|
||||
onOverlayModalClosed: () => {},
|
||||
openYomitanSettings: () => {},
|
||||
quitApp: () => {},
|
||||
toggleDevTools: () => {},
|
||||
getVisibleOverlayVisibility: () => false,
|
||||
toggleVisibleOverlay: () => {},
|
||||
tokenizeCurrentSubtitle: async () => null,
|
||||
getCurrentSubtitleRaw: () => '',
|
||||
getCurrentSubtitleAss: () => '',
|
||||
getPlaybackPaused: () => false,
|
||||
getSubtitlePosition: () => null,
|
||||
getSubtitleStyle: () => null,
|
||||
saveSubtitlePosition: () => {},
|
||||
getMecabStatus: () => ({ available: false, enabled: false, path: null }),
|
||||
setMecabEnabled: () => {},
|
||||
handleMpvCommand: () => {},
|
||||
getKeybindings: () => [],
|
||||
getConfiguredShortcuts: () => ({}),
|
||||
getStatsToggleKey: () => 'Backquote',
|
||||
getMarkWatchedKey: () => 'KeyW',
|
||||
getControllerConfig: () => createControllerConfigFixture(),
|
||||
saveControllerConfig: async () => {},
|
||||
saveControllerPreference: async () => {},
|
||||
getSecondarySubMode: () => 'hover',
|
||||
getCurrentSecondarySub: () => '',
|
||||
focusMainWindow: () => {},
|
||||
runSubsyncManual: async () => ({ ok: true, message: 'ok' }),
|
||||
getAnkiConnectStatus: () => false,
|
||||
getRuntimeOptions: () => [],
|
||||
setRuntimeOption: () => ({ ok: true }),
|
||||
cycleRuntimeOption: () => ({ ok: true }),
|
||||
reportOverlayContentBounds: () => {},
|
||||
getAnilistStatus: () => ({}),
|
||||
clearAnilistToken: () => {},
|
||||
openAnilistSetup: () => {},
|
||||
getAnilistQueueStatus: () => ({}),
|
||||
retryAnilistQueueNow: async () => ({ ok: true, message: 'ok' }),
|
||||
appendClipboardVideoToQueue: () => ({ ok: true, message: 'ok' }),
|
||||
immersionTracker: null,
|
||||
...overrides,
|
||||
};
|
||||
}
|
||||
|
||||
function createFakeImmersionTracker(
|
||||
overrides: Partial<NonNullable<IpcServiceDeps['immersionTracker']>> = {},
|
||||
): NonNullable<IpcServiceDeps['immersionTracker']> {
|
||||
return {
|
||||
recordYomitanLookup: () => {},
|
||||
getSessionSummaries: async () => [],
|
||||
getDailyRollups: async () => [],
|
||||
getMonthlyRollups: async () => [],
|
||||
getQueryHints: async () => ({
|
||||
totalSessions: 0,
|
||||
activeSessions: 0,
|
||||
episodesToday: 0,
|
||||
activeAnimeCount: 0,
|
||||
totalActiveMin: 0,
|
||||
totalCards: 0,
|
||||
activeDays: 0,
|
||||
totalEpisodesWatched: 0,
|
||||
totalAnimeCompleted: 0,
|
||||
totalTokensSeen: 0,
|
||||
totalLookupCount: 0,
|
||||
totalLookupHits: 0,
|
||||
totalYomitanLookupCount: 0,
|
||||
newWordsToday: 0,
|
||||
newWordsThisWeek: 0,
|
||||
}),
|
||||
getSessionTimeline: async () => [],
|
||||
getSessionEvents: async () => [],
|
||||
getVocabularyStats: async () => [],
|
||||
getKanjiStats: async () => [],
|
||||
getMediaLibrary: async () => [],
|
||||
getMediaDetail: async () => null,
|
||||
getMediaSessions: async () => [],
|
||||
getMediaDailyRollups: async () => [],
|
||||
getCoverArt: async () => null,
|
||||
markActiveVideoWatched: async () => false,
|
||||
...overrides,
|
||||
};
|
||||
}
|
||||
|
||||
test('createIpcDepsRuntime wires AniList handlers', async () => {
|
||||
const calls: string[] = [];
|
||||
const deps = createIpcDepsRuntime({
|
||||
@@ -97,6 +181,8 @@ test('createIpcDepsRuntime wires AniList handlers', async () => {
|
||||
handleMpvCommand: () => {},
|
||||
getKeybindings: () => [],
|
||||
getConfiguredShortcuts: () => ({}),
|
||||
getStatsToggleKey: () => 'Backquote',
|
||||
getMarkWatchedKey: () => 'KeyW',
|
||||
getControllerConfig: () => createControllerConfigFixture(),
|
||||
saveControllerConfig: () => {},
|
||||
saveControllerPreference: () => {},
|
||||
@@ -164,6 +250,8 @@ test('registerIpcHandlers rejects malformed runtime-option payloads', async () =
|
||||
handleMpvCommand: () => {},
|
||||
getKeybindings: () => [],
|
||||
getConfiguredShortcuts: () => ({}),
|
||||
getStatsToggleKey: () => 'Backquote',
|
||||
getMarkWatchedKey: () => 'KeyW',
|
||||
getControllerConfig: () => createControllerConfigFixture(),
|
||||
saveControllerConfig: () => {},
|
||||
saveControllerPreference: () => {},
|
||||
@@ -232,6 +320,194 @@ test('registerIpcHandlers rejects malformed runtime-option payloads', async () =
|
||||
);
|
||||
});
|
||||
|
||||
test('registerIpcHandlers forwards yomitan lookup tracking commands to immersion tracker', () => {
|
||||
const { registrar, handlers } = createFakeIpcRegistrar();
|
||||
const calls: string[] = [];
|
||||
registerIpcHandlers(
|
||||
createRegisterIpcDeps({
|
||||
immersionTracker: createFakeImmersionTracker({
|
||||
recordYomitanLookup: () => {
|
||||
calls.push('lookup');
|
||||
},
|
||||
}),
|
||||
}),
|
||||
registrar,
|
||||
);
|
||||
|
||||
const handler = handlers.on.get(IPC_CHANNELS.command.recordYomitanLookup);
|
||||
assert.equal(typeof handler, 'function');
|
||||
|
||||
handler?.({}, null);
|
||||
|
||||
assert.deepEqual(calls, ['lookup']);
|
||||
});
|
||||
|
||||
test('registerIpcHandlers returns empty stats overview shape without a tracker', async () => {
|
||||
const { registrar, handlers } = createFakeIpcRegistrar();
|
||||
registerIpcHandlers(createRegisterIpcDeps(), registrar);
|
||||
|
||||
const overviewHandler = handlers.handle.get(IPC_CHANNELS.request.statsGetOverview);
|
||||
assert.ok(overviewHandler);
|
||||
assert.deepEqual(await overviewHandler!({}), {
|
||||
sessions: [],
|
||||
rollups: [],
|
||||
hints: {
|
||||
totalSessions: 0,
|
||||
activeSessions: 0,
|
||||
episodesToday: 0,
|
||||
activeAnimeCount: 0,
|
||||
totalCards: 0,
|
||||
totalActiveMin: 0,
|
||||
activeDays: 0,
|
||||
totalEpisodesWatched: 0,
|
||||
totalAnimeCompleted: 0,
|
||||
totalTokensSeen: 0,
|
||||
totalLookupCount: 0,
|
||||
totalLookupHits: 0,
|
||||
totalYomitanLookupCount: 0,
|
||||
newWordsToday: 0,
|
||||
newWordsThisWeek: 0,
|
||||
},
|
||||
});
|
||||
});
|
||||
|
||||
test('registerIpcHandlers validates and clamps stats request limits', async () => {
|
||||
const { registrar, handlers } = createFakeIpcRegistrar();
|
||||
const calls: Array<[string, number, number?]> = [];
|
||||
|
||||
registerIpcHandlers(
|
||||
createRegisterIpcDeps({
|
||||
immersionTracker: {
|
||||
recordYomitanLookup: () => {},
|
||||
getSessionSummaries: async (limit = 0) => {
|
||||
calls.push(['sessions', limit]);
|
||||
return [];
|
||||
},
|
||||
getDailyRollups: async (limit = 0) => {
|
||||
calls.push(['daily', limit]);
|
||||
return [];
|
||||
},
|
||||
getMonthlyRollups: async (limit = 0) => {
|
||||
calls.push(['monthly', limit]);
|
||||
return [];
|
||||
},
|
||||
getQueryHints: async () => ({
|
||||
totalSessions: 0,
|
||||
activeSessions: 0,
|
||||
episodesToday: 0,
|
||||
activeAnimeCount: 0,
|
||||
totalCards: 0,
|
||||
totalActiveMin: 0,
|
||||
activeDays: 0,
|
||||
totalEpisodesWatched: 0,
|
||||
totalAnimeCompleted: 0,
|
||||
totalTokensSeen: 0,
|
||||
totalLookupCount: 0,
|
||||
totalLookupHits: 0,
|
||||
totalYomitanLookupCount: 0,
|
||||
newWordsToday: 0,
|
||||
newWordsThisWeek: 0,
|
||||
}),
|
||||
getSessionTimeline: async (sessionId: number, limit = 0) => {
|
||||
calls.push(['timeline', limit, sessionId]);
|
||||
return [];
|
||||
},
|
||||
getSessionEvents: async (sessionId: number, limit = 0) => {
|
||||
calls.push(['events', limit, sessionId]);
|
||||
return [];
|
||||
},
|
||||
getVocabularyStats: async (limit = 0) => {
|
||||
calls.push(['vocabulary', limit]);
|
||||
return [];
|
||||
},
|
||||
getKanjiStats: async (limit = 0) => {
|
||||
calls.push(['kanji', limit]);
|
||||
return [];
|
||||
},
|
||||
getMediaLibrary: async () => [],
|
||||
getMediaDetail: async () => null,
|
||||
getMediaSessions: async () => [],
|
||||
getMediaDailyRollups: async () => [],
|
||||
getCoverArt: async () => null,
|
||||
markActiveVideoWatched: async () => false,
|
||||
},
|
||||
}),
|
||||
registrar,
|
||||
);
|
||||
|
||||
await handlers.handle.get(IPC_CHANNELS.request.statsGetDailyRollups)!({}, -1);
|
||||
await handlers.handle.get(IPC_CHANNELS.request.statsGetMonthlyRollups)!(
|
||||
{},
|
||||
Number.POSITIVE_INFINITY,
|
||||
);
|
||||
await handlers.handle.get(IPC_CHANNELS.request.statsGetSessions)!({}, 9999);
|
||||
await handlers.handle.get(IPC_CHANNELS.request.statsGetSessionTimeline)!({}, 7, 12.5);
|
||||
await handlers.handle.get(IPC_CHANNELS.request.statsGetSessionEvents)!({}, 7, 0);
|
||||
await handlers.handle.get(IPC_CHANNELS.request.statsGetVocabulary)!({}, 1000);
|
||||
await handlers.handle.get(IPC_CHANNELS.request.statsGetKanji)!({}, NaN);
|
||||
|
||||
assert.deepEqual(calls, [
|
||||
['daily', 60],
|
||||
['monthly', 24],
|
||||
['sessions', 500],
|
||||
['timeline', 200, 7],
|
||||
['events', 500, 7],
|
||||
['vocabulary', 500],
|
||||
['kanji', 100],
|
||||
]);
|
||||
});
|
||||
|
||||
test('registerIpcHandlers requests the full timeline when no limit is provided', async () => {
|
||||
const { registrar, handlers } = createFakeIpcRegistrar();
|
||||
const calls: Array<[string, number | undefined, number]> = [];
|
||||
|
||||
registerIpcHandlers(
|
||||
createRegisterIpcDeps({
|
||||
immersionTracker: {
|
||||
recordYomitanLookup: () => {},
|
||||
getSessionSummaries: async () => [],
|
||||
getDailyRollups: async () => [],
|
||||
getMonthlyRollups: async () => [],
|
||||
getQueryHints: async () => ({
|
||||
totalSessions: 0,
|
||||
activeSessions: 0,
|
||||
episodesToday: 0,
|
||||
activeAnimeCount: 0,
|
||||
totalCards: 0,
|
||||
totalActiveMin: 0,
|
||||
activeDays: 0,
|
||||
totalEpisodesWatched: 0,
|
||||
totalAnimeCompleted: 0,
|
||||
totalTokensSeen: 0,
|
||||
totalLookupCount: 0,
|
||||
totalLookupHits: 0,
|
||||
totalYomitanLookupCount: 0,
|
||||
newWordsToday: 0,
|
||||
newWordsThisWeek: 0,
|
||||
}),
|
||||
getSessionTimeline: async (sessionId: number, limit?: number) => {
|
||||
calls.push(['timeline', limit, sessionId]);
|
||||
return [];
|
||||
},
|
||||
getSessionEvents: async () => [],
|
||||
getVocabularyStats: async () => [],
|
||||
getKanjiStats: async () => [],
|
||||
getMediaLibrary: async () => [],
|
||||
getMediaDetail: async () => null,
|
||||
getMediaSessions: async () => [],
|
||||
getMediaDailyRollups: async () => [],
|
||||
getCoverArt: async () => null,
|
||||
markActiveVideoWatched: async () => false,
|
||||
},
|
||||
}),
|
||||
registrar,
|
||||
);
|
||||
|
||||
await handlers.handle.get(IPC_CHANNELS.request.statsGetSessionTimeline)!({}, 7, undefined);
|
||||
|
||||
assert.deepEqual(calls, [['timeline', undefined, 7]]);
|
||||
});
|
||||
|
||||
test('registerIpcHandlers ignores malformed fire-and-forget payloads', () => {
|
||||
const { registrar, handlers } = createFakeIpcRegistrar();
|
||||
const saves: unknown[] = [];
|
||||
@@ -265,10 +541,10 @@ test('registerIpcHandlers ignores malformed fire-and-forget payloads', () => {
|
||||
handleMpvCommand: () => {},
|
||||
getKeybindings: () => [],
|
||||
getConfiguredShortcuts: () => ({}),
|
||||
getStatsToggleKey: () => 'Backquote',
|
||||
getMarkWatchedKey: () => 'KeyW',
|
||||
getControllerConfig: () => createControllerConfigFixture(),
|
||||
saveControllerConfig: (update) => {
|
||||
controllerSaves.push(update);
|
||||
},
|
||||
saveControllerConfig: () => {},
|
||||
saveControllerPreference: (update) => {
|
||||
controllerSaves.push(update);
|
||||
},
|
||||
@@ -329,6 +605,8 @@ test('registerIpcHandlers awaits saveControllerPreference through request-respon
|
||||
handleMpvCommand: () => {},
|
||||
getKeybindings: () => [],
|
||||
getConfiguredShortcuts: () => ({}),
|
||||
getStatsToggleKey: () => 'Backquote',
|
||||
getMarkWatchedKey: () => 'KeyW',
|
||||
getControllerConfig: () => createControllerConfigFixture(),
|
||||
saveControllerConfig: async () => {},
|
||||
saveControllerPreference: async (update) => {
|
||||
@@ -376,85 +654,6 @@ test('registerIpcHandlers awaits saveControllerPreference through request-respon
|
||||
]);
|
||||
});
|
||||
|
||||
test('registerIpcHandlers awaits saveControllerConfig through request-response IPC', async () => {
|
||||
const { registrar, handlers } = createFakeIpcRegistrar();
|
||||
const controllerConfigSaves: unknown[] = [];
|
||||
registerIpcHandlers(
|
||||
{
|
||||
onOverlayModalClosed: () => {},
|
||||
openYomitanSettings: () => {},
|
||||
quitApp: () => {},
|
||||
toggleDevTools: () => {},
|
||||
getVisibleOverlayVisibility: () => false,
|
||||
toggleVisibleOverlay: () => {},
|
||||
tokenizeCurrentSubtitle: async () => null,
|
||||
getCurrentSubtitleRaw: () => '',
|
||||
getCurrentSubtitleAss: () => '',
|
||||
getPlaybackPaused: () => false,
|
||||
getSubtitlePosition: () => null,
|
||||
getSubtitleStyle: () => null,
|
||||
saveSubtitlePosition: () => {},
|
||||
getMecabStatus: () => ({ available: false, enabled: false, path: null }),
|
||||
setMecabEnabled: () => {},
|
||||
handleMpvCommand: () => {},
|
||||
getKeybindings: () => [],
|
||||
getConfiguredShortcuts: () => ({}),
|
||||
getControllerConfig: () => createControllerConfigFixture(),
|
||||
saveControllerConfig: async (update) => {
|
||||
await Promise.resolve();
|
||||
controllerConfigSaves.push(update);
|
||||
},
|
||||
saveControllerPreference: async () => {},
|
||||
getSecondarySubMode: () => 'hover',
|
||||
getCurrentSecondarySub: () => '',
|
||||
focusMainWindow: () => {},
|
||||
runSubsyncManual: async () => ({ ok: true, message: 'ok' }),
|
||||
getAnkiConnectStatus: () => false,
|
||||
getRuntimeOptions: () => [],
|
||||
setRuntimeOption: () => ({ ok: true }),
|
||||
cycleRuntimeOption: () => ({ ok: true }),
|
||||
reportOverlayContentBounds: () => {},
|
||||
getAnilistStatus: () => ({}),
|
||||
clearAnilistToken: () => {},
|
||||
openAnilistSetup: () => {},
|
||||
getAnilistQueueStatus: () => ({}),
|
||||
retryAnilistQueueNow: async () => ({ ok: true, message: 'ok' }),
|
||||
appendClipboardVideoToQueue: () => ({ ok: true, message: 'ok' }),
|
||||
},
|
||||
registrar,
|
||||
);
|
||||
|
||||
const saveHandler = handlers.handle.get(IPC_CHANNELS.command.saveControllerConfig);
|
||||
assert.ok(saveHandler);
|
||||
|
||||
await assert.rejects(
|
||||
async () => {
|
||||
await saveHandler!({}, { bindings: { toggleLookup: { kind: 'button', buttonIndex: -1 } } });
|
||||
},
|
||||
/Invalid controller config payload/,
|
||||
);
|
||||
|
||||
await saveHandler!({}, {
|
||||
preferredGamepadId: 'pad-2',
|
||||
bindings: {
|
||||
toggleLookup: { kind: 'button', buttonIndex: 11 },
|
||||
closeLookup: { kind: 'axis', axisIndex: 4, direction: 'negative' },
|
||||
leftStickHorizontal: { kind: 'axis', axisIndex: 7, dpadFallback: 'none' },
|
||||
},
|
||||
});
|
||||
|
||||
assert.deepEqual(controllerConfigSaves, [
|
||||
{
|
||||
preferredGamepadId: 'pad-2',
|
||||
bindings: {
|
||||
toggleLookup: { kind: 'button', buttonIndex: 11 },
|
||||
closeLookup: { kind: 'axis', axisIndex: 4, direction: 'negative' },
|
||||
leftStickHorizontal: { kind: 'axis', axisIndex: 7, dpadFallback: 'none' },
|
||||
},
|
||||
},
|
||||
]);
|
||||
});
|
||||
|
||||
test('registerIpcHandlers rejects malformed controller preference payloads', async () => {
|
||||
const { registrar, handlers } = createFakeIpcRegistrar();
|
||||
registerIpcHandlers(
|
||||
@@ -477,6 +676,8 @@ test('registerIpcHandlers rejects malformed controller preference payloads', asy
|
||||
handleMpvCommand: () => {},
|
||||
getKeybindings: () => [],
|
||||
getConfiguredShortcuts: () => ({}),
|
||||
getStatsToggleKey: () => 'Backquote',
|
||||
getMarkWatchedKey: () => 'KeyW',
|
||||
getControllerConfig: () => createControllerConfigFixture(),
|
||||
saveControllerConfig: async () => {},
|
||||
saveControllerPreference: async () => {},
|
||||
|
||||
@@ -50,6 +50,8 @@ export interface IpcServiceDeps {
|
||||
handleMpvCommand: (command: Array<string | number>) => void;
|
||||
getKeybindings: () => unknown;
|
||||
getConfiguredShortcuts: () => unknown;
|
||||
getStatsToggleKey: () => string;
|
||||
getMarkWatchedKey: () => string;
|
||||
getControllerConfig: () => ResolvedControllerConfig;
|
||||
saveControllerConfig: (update: ControllerConfigUpdate) => void | Promise<void>;
|
||||
saveControllerPreference: (update: ControllerPreferenceUpdate) => void | Promise<void>;
|
||||
@@ -68,6 +70,39 @@ export interface IpcServiceDeps {
|
||||
getAnilistQueueStatus: () => unknown;
|
||||
retryAnilistQueueNow: () => Promise<{ ok: boolean; message: string }>;
|
||||
appendClipboardVideoToQueue: () => { ok: boolean; message: string };
|
||||
immersionTracker?: {
|
||||
recordYomitanLookup: () => void;
|
||||
getSessionSummaries: (limit?: number) => Promise<unknown>;
|
||||
getDailyRollups: (limit?: number) => Promise<unknown>;
|
||||
getMonthlyRollups: (limit?: number) => Promise<unknown>;
|
||||
getQueryHints: () => Promise<{
|
||||
totalSessions: number;
|
||||
activeSessions: number;
|
||||
episodesToday: number;
|
||||
activeAnimeCount: number;
|
||||
totalActiveMin: number;
|
||||
totalCards: number;
|
||||
activeDays: number;
|
||||
totalEpisodesWatched: number;
|
||||
totalAnimeCompleted: number;
|
||||
totalTokensSeen: number;
|
||||
totalLookupCount: number;
|
||||
totalLookupHits: number;
|
||||
totalYomitanLookupCount: number;
|
||||
newWordsToday: number;
|
||||
newWordsThisWeek: number;
|
||||
}>;
|
||||
getSessionTimeline: (sessionId: number, limit?: number) => Promise<unknown>;
|
||||
getSessionEvents: (sessionId: number, limit?: number) => Promise<unknown>;
|
||||
getVocabularyStats: (limit?: number) => Promise<unknown>;
|
||||
getKanjiStats: (limit?: number) => Promise<unknown>;
|
||||
getMediaLibrary: () => Promise<unknown>;
|
||||
getMediaDetail: (videoId: number) => Promise<unknown>;
|
||||
getMediaSessions: (videoId: number, limit?: number) => Promise<unknown>;
|
||||
getMediaDailyRollups: (videoId: number, limit?: number) => Promise<unknown>;
|
||||
getCoverArt: (videoId: number) => Promise<unknown>;
|
||||
markActiveVideoWatched: () => Promise<boolean>;
|
||||
} | null;
|
||||
}
|
||||
|
||||
interface WindowLike {
|
||||
@@ -116,6 +151,8 @@ export interface IpcDepsRuntimeOptions {
|
||||
handleMpvCommand: (command: Array<string | number>) => void;
|
||||
getKeybindings: () => unknown;
|
||||
getConfiguredShortcuts: () => unknown;
|
||||
getStatsToggleKey: () => string;
|
||||
getMarkWatchedKey: () => string;
|
||||
getControllerConfig: () => ResolvedControllerConfig;
|
||||
saveControllerConfig: (update: ControllerConfigUpdate) => void | Promise<void>;
|
||||
saveControllerPreference: (update: ControllerPreferenceUpdate) => void | Promise<void>;
|
||||
@@ -134,6 +171,7 @@ export interface IpcDepsRuntimeOptions {
|
||||
getAnilistQueueStatus: () => unknown;
|
||||
retryAnilistQueueNow: () => Promise<{ ok: boolean; message: string }>;
|
||||
appendClipboardVideoToQueue: () => { ok: boolean; message: string };
|
||||
getImmersionTracker?: () => IpcServiceDeps['immersionTracker'];
|
||||
}
|
||||
|
||||
export function createIpcDepsRuntime(options: IpcDepsRuntimeOptions): IpcServiceDeps {
|
||||
@@ -170,6 +208,8 @@ export function createIpcDepsRuntime(options: IpcDepsRuntimeOptions): IpcService
|
||||
handleMpvCommand: options.handleMpvCommand,
|
||||
getKeybindings: options.getKeybindings,
|
||||
getConfiguredShortcuts: options.getConfiguredShortcuts,
|
||||
getStatsToggleKey: options.getStatsToggleKey,
|
||||
getMarkWatchedKey: options.getMarkWatchedKey,
|
||||
getControllerConfig: options.getControllerConfig,
|
||||
saveControllerConfig: options.saveControllerConfig,
|
||||
saveControllerPreference: options.saveControllerPreference,
|
||||
@@ -192,10 +232,31 @@ export function createIpcDepsRuntime(options: IpcDepsRuntimeOptions): IpcService
|
||||
getAnilistQueueStatus: options.getAnilistQueueStatus,
|
||||
retryAnilistQueueNow: options.retryAnilistQueueNow,
|
||||
appendClipboardVideoToQueue: options.appendClipboardVideoToQueue,
|
||||
get immersionTracker() {
|
||||
return options.getImmersionTracker?.() ?? null;
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
export function registerIpcHandlers(deps: IpcServiceDeps, ipc: IpcMainRegistrar = ipcMain): void {
|
||||
const parsePositiveIntLimit = (
|
||||
value: unknown,
|
||||
defaultValue: number,
|
||||
maxValue: number,
|
||||
): number => {
|
||||
if (!Number.isInteger(value) || (value as number) < 1) {
|
||||
return defaultValue;
|
||||
}
|
||||
return Math.min(value as number, maxValue);
|
||||
};
|
||||
|
||||
const parsePositiveInteger = (value: unknown): number | null => {
|
||||
if (typeof value !== 'number' || !Number.isInteger(value) || value <= 0) {
|
||||
return null;
|
||||
}
|
||||
return value;
|
||||
};
|
||||
|
||||
ipc.on(
|
||||
IPC_CHANNELS.command.setIgnoreMouseEvents,
|
||||
(event: unknown, ignore: unknown, options: unknown = {}) => {
|
||||
@@ -224,6 +285,14 @@ export function registerIpcHandlers(deps: IpcServiceDeps, ipc: IpcMainRegistrar
|
||||
deps.openYomitanSettings();
|
||||
});
|
||||
|
||||
ipc.on(IPC_CHANNELS.command.recordYomitanLookup, () => {
|
||||
deps.immersionTracker?.recordYomitanLookup();
|
||||
});
|
||||
|
||||
ipc.handle(IPC_CHANNELS.command.markActiveVideoWatched, async () => {
|
||||
return (await deps.immersionTracker?.markActiveVideoWatched()) ?? false;
|
||||
});
|
||||
|
||||
ipc.on(IPC_CHANNELS.command.quitApp, () => {
|
||||
deps.quitApp();
|
||||
});
|
||||
@@ -312,6 +381,14 @@ export function registerIpcHandlers(deps: IpcServiceDeps, ipc: IpcMainRegistrar
|
||||
return deps.getConfiguredShortcuts();
|
||||
});
|
||||
|
||||
ipc.handle(IPC_CHANNELS.request.getStatsToggleKey, () => {
|
||||
return deps.getStatsToggleKey();
|
||||
});
|
||||
|
||||
ipc.handle(IPC_CHANNELS.request.getMarkWatchedKey, () => {
|
||||
return deps.getMarkWatchedKey();
|
||||
});
|
||||
|
||||
ipc.handle(IPC_CHANNELS.request.getControllerConfig, () => {
|
||||
return deps.getControllerConfig();
|
||||
});
|
||||
@@ -397,4 +474,115 @@ export function registerIpcHandlers(deps: IpcServiceDeps, ipc: IpcMainRegistrar
|
||||
ipc.handle(IPC_CHANNELS.request.appendClipboardVideoToQueue, () => {
|
||||
return deps.appendClipboardVideoToQueue();
|
||||
});
|
||||
|
||||
// Stats request handlers
|
||||
ipc.handle(IPC_CHANNELS.request.statsGetOverview, async () => {
|
||||
const tracker = deps.immersionTracker;
|
||||
if (!tracker) {
|
||||
return {
|
||||
sessions: [],
|
||||
rollups: [],
|
||||
hints: {
|
||||
totalSessions: 0,
|
||||
activeSessions: 0,
|
||||
episodesToday: 0,
|
||||
activeAnimeCount: 0,
|
||||
totalActiveMin: 0,
|
||||
totalCards: 0,
|
||||
activeDays: 0,
|
||||
totalEpisodesWatched: 0,
|
||||
totalAnimeCompleted: 0,
|
||||
totalTokensSeen: 0,
|
||||
totalLookupCount: 0,
|
||||
totalLookupHits: 0,
|
||||
totalYomitanLookupCount: 0,
|
||||
newWordsToday: 0,
|
||||
newWordsThisWeek: 0,
|
||||
},
|
||||
};
|
||||
}
|
||||
const [sessions, rollups, hints] = await Promise.all([
|
||||
tracker.getSessionSummaries(5),
|
||||
tracker.getDailyRollups(14),
|
||||
tracker.getQueryHints(),
|
||||
]);
|
||||
return { sessions, rollups, hints };
|
||||
});
|
||||
|
||||
ipc.handle(IPC_CHANNELS.request.statsGetDailyRollups, async (_event, limit: unknown) => {
|
||||
const parsedLimit = parsePositiveIntLimit(limit, 60, 500);
|
||||
return deps.immersionTracker?.getDailyRollups(parsedLimit) ?? [];
|
||||
});
|
||||
|
||||
ipc.handle(IPC_CHANNELS.request.statsGetMonthlyRollups, async (_event, limit: unknown) => {
|
||||
const parsedLimit = parsePositiveIntLimit(limit, 24, 120);
|
||||
return deps.immersionTracker?.getMonthlyRollups(parsedLimit) ?? [];
|
||||
});
|
||||
|
||||
ipc.handle(IPC_CHANNELS.request.statsGetSessions, async (_event, limit: unknown) => {
|
||||
const parsedLimit = parsePositiveIntLimit(limit, 50, 500);
|
||||
return deps.immersionTracker?.getSessionSummaries(parsedLimit) ?? [];
|
||||
});
|
||||
|
||||
ipc.handle(
|
||||
IPC_CHANNELS.request.statsGetSessionTimeline,
|
||||
async (_event, sessionId: unknown, limit: unknown) => {
|
||||
const parsedSessionId = parsePositiveInteger(sessionId);
|
||||
if (parsedSessionId === null) return [];
|
||||
const parsedLimit = limit === undefined ? undefined : parsePositiveIntLimit(limit, 200, 1000);
|
||||
return deps.immersionTracker?.getSessionTimeline(parsedSessionId, parsedLimit) ?? [];
|
||||
},
|
||||
);
|
||||
|
||||
ipc.handle(
|
||||
IPC_CHANNELS.request.statsGetSessionEvents,
|
||||
async (_event, sessionId: unknown, limit: unknown) => {
|
||||
const parsedSessionId = parsePositiveInteger(sessionId);
|
||||
if (parsedSessionId === null) return [];
|
||||
const parsedLimit = parsePositiveIntLimit(limit, 500, 1000);
|
||||
return deps.immersionTracker?.getSessionEvents(parsedSessionId, parsedLimit) ?? [];
|
||||
},
|
||||
);
|
||||
|
||||
ipc.handle(IPC_CHANNELS.request.statsGetVocabulary, async (_event, limit: unknown) => {
|
||||
const parsedLimit = parsePositiveIntLimit(limit, 100, 500);
|
||||
return deps.immersionTracker?.getVocabularyStats(parsedLimit) ?? [];
|
||||
});
|
||||
|
||||
ipc.handle(IPC_CHANNELS.request.statsGetKanji, async (_event, limit: unknown) => {
|
||||
const parsedLimit = parsePositiveIntLimit(limit, 100, 500);
|
||||
return deps.immersionTracker?.getKanjiStats(parsedLimit) ?? [];
|
||||
});
|
||||
|
||||
ipc.handle(IPC_CHANNELS.request.statsGetMediaLibrary, async () => {
|
||||
return deps.immersionTracker?.getMediaLibrary() ?? [];
|
||||
});
|
||||
|
||||
ipc.handle(IPC_CHANNELS.request.statsGetMediaDetail, async (_event, videoId: unknown) => {
|
||||
if (typeof videoId !== 'number') return null;
|
||||
return deps.immersionTracker?.getMediaDetail(videoId) ?? null;
|
||||
});
|
||||
|
||||
ipc.handle(
|
||||
IPC_CHANNELS.request.statsGetMediaSessions,
|
||||
async (_event, videoId: unknown, limit: unknown) => {
|
||||
if (typeof videoId !== 'number') return [];
|
||||
const parsedLimit = parsePositiveIntLimit(limit, 100, 500);
|
||||
return deps.immersionTracker?.getMediaSessions(videoId, parsedLimit) ?? [];
|
||||
},
|
||||
);
|
||||
|
||||
ipc.handle(
|
||||
IPC_CHANNELS.request.statsGetMediaDailyRollups,
|
||||
async (_event, videoId: unknown, limit: unknown) => {
|
||||
if (typeof videoId !== 'number') return [];
|
||||
const parsedLimit = parsePositiveIntLimit(limit, 90, 500);
|
||||
return deps.immersionTracker?.getMediaDailyRollups(videoId, parsedLimit) ?? [];
|
||||
},
|
||||
);
|
||||
|
||||
ipc.handle(IPC_CHANNELS.request.statsGetMediaCover, async (_event, videoId: unknown) => {
|
||||
if (typeof videoId !== 'number') return null;
|
||||
return deps.immersionTracker?.getCoverArt(videoId) ?? null;
|
||||
});
|
||||
}
|
||||
|
||||
@@ -59,9 +59,12 @@ const MPV_SUBTITLE_PROPERTY_OBSERVATIONS: string[] = [
|
||||
'sub-ass-override',
|
||||
'sub-use-margins',
|
||||
'pause',
|
||||
'duration',
|
||||
'media-title',
|
||||
'secondary-sub-visibility',
|
||||
'sub-visibility',
|
||||
'sid',
|
||||
'track-list',
|
||||
];
|
||||
|
||||
const MPV_INITIAL_PROPERTY_REQUESTS: Array<MpvProtocolCommand> = [
|
||||
|
||||
@@ -60,6 +60,8 @@ function createDeps(overrides: Partial<MpvProtocolHandleMessageDeps> = {}): {
|
||||
emitSubtitleAssChange: (payload) => state.events.push(payload),
|
||||
emitSubtitleTiming: (payload) => state.events.push(payload),
|
||||
emitSecondarySubtitleChange: (payload) => state.events.push(payload),
|
||||
emitSubtitleTrackChange: (payload) => state.events.push(payload),
|
||||
emitSubtitleTrackListChange: (payload) => state.events.push(payload),
|
||||
getCurrentSubText: () => state.subText,
|
||||
setCurrentSubText: (text) => {
|
||||
state.subText = text;
|
||||
@@ -87,6 +89,7 @@ function createDeps(overrides: Partial<MpvProtocolHandleMessageDeps> = {}): {
|
||||
getPauseAtTime: () => null,
|
||||
setPauseAtTime: () => {},
|
||||
emitTimePosChange: () => {},
|
||||
emitDurationChange: () => {},
|
||||
emitPauseChange: () => {},
|
||||
autoLoadSecondarySubTrack: () => {},
|
||||
setCurrentVideoPath: () => {},
|
||||
@@ -119,6 +122,21 @@ test('dispatchMpvProtocolMessage emits subtitle text on property change', async
|
||||
assert.deepEqual(state.events, [{ text: '字幕', isOverlayVisible: false }]);
|
||||
});
|
||||
|
||||
test('dispatchMpvProtocolMessage emits subtitle track changes', async () => {
|
||||
const { deps, state } = createDeps({
|
||||
emitSubtitleTrackChange: (payload) => state.events.push(payload),
|
||||
emitSubtitleTrackListChange: (payload) => state.events.push(payload),
|
||||
});
|
||||
|
||||
await dispatchMpvProtocolMessage({ event: 'property-change', name: 'sid', data: '3' }, deps);
|
||||
await dispatchMpvProtocolMessage(
|
||||
{ event: 'property-change', name: 'track-list', data: [{ type: 'sub', id: 3 }] },
|
||||
deps,
|
||||
);
|
||||
|
||||
assert.deepEqual(state.events, [{ sid: 3 }, { trackList: [{ type: 'sub', id: 3 }] }]);
|
||||
});
|
||||
|
||||
test('dispatchMpvProtocolMessage enforces sub-visibility hidden when overlay suppression is enabled', async () => {
|
||||
const { deps, state } = createDeps({
|
||||
isVisibleOverlayVisible: () => true,
|
||||
|
||||
@@ -52,6 +52,8 @@ export interface MpvProtocolHandleMessageDeps {
|
||||
emitSubtitleAssChange: (payload: { text: string }) => void;
|
||||
emitSubtitleTiming: (payload: { text: string; start: number; end: number }) => void;
|
||||
emitSecondarySubtitleChange: (payload: { text: string }) => void;
|
||||
emitSubtitleTrackChange: (payload: { sid: number | null }) => void;
|
||||
emitSubtitleTrackListChange: (payload: { trackList: unknown[] | null }) => void;
|
||||
getCurrentSubText: () => string;
|
||||
setCurrentSubText: (text: string) => void;
|
||||
setCurrentSubStart: (value: number) => void;
|
||||
@@ -61,6 +63,7 @@ export interface MpvProtocolHandleMessageDeps {
|
||||
emitMediaPathChange: (payload: { path: string }) => void;
|
||||
emitMediaTitleChange: (payload: { title: string | null }) => void;
|
||||
emitTimePosChange: (payload: { time: number }) => void;
|
||||
emitDurationChange: (payload: { duration: number }) => void;
|
||||
emitPauseChange: (payload: { paused: boolean }) => void;
|
||||
emitSubtitleMetricsChange: (payload: Partial<MpvSubtitleRenderMetrics>) => void;
|
||||
setCurrentSecondarySubText: (text: string) => void;
|
||||
@@ -159,6 +162,18 @@ export async function dispatchMpvProtocolMessage(
|
||||
const nextSubText = (msg.data as string) || '';
|
||||
deps.setCurrentSecondarySubText(nextSubText);
|
||||
deps.emitSecondarySubtitleChange({ text: nextSubText });
|
||||
} else if (msg.name === 'sid') {
|
||||
const sid =
|
||||
typeof msg.data === 'number'
|
||||
? msg.data
|
||||
: typeof msg.data === 'string'
|
||||
? Number(msg.data)
|
||||
: null;
|
||||
deps.emitSubtitleTrackChange({ sid: sid !== null && Number.isFinite(sid) ? sid : null });
|
||||
} else if (msg.name === 'track-list') {
|
||||
deps.emitSubtitleTrackListChange({
|
||||
trackList: Array.isArray(msg.data) ? (msg.data as unknown[]) : null,
|
||||
});
|
||||
} else if (msg.name === 'aid') {
|
||||
deps.setCurrentAudioTrackId(typeof msg.data === 'number' ? (msg.data as number) : null);
|
||||
deps.syncCurrentAudioStreamIndex();
|
||||
@@ -172,6 +187,11 @@ export async function dispatchMpvProtocolMessage(
|
||||
deps.setPauseAtTime(null);
|
||||
deps.sendCommand({ command: ['set_property', 'pause', true] });
|
||||
}
|
||||
} else if (msg.name === 'duration') {
|
||||
const duration = typeof msg.data === 'number' ? msg.data : 0;
|
||||
if (duration > 0) {
|
||||
deps.emitDurationChange({ duration });
|
||||
}
|
||||
} else if (msg.name === 'pause') {
|
||||
deps.emitPauseChange({ paused: asBoolean(msg.data, false) });
|
||||
} else if (msg.name === 'media-title') {
|
||||
|
||||
@@ -115,8 +115,11 @@ export interface MpvIpcClientEventMap {
|
||||
'subtitle-ass-change': { text: string };
|
||||
'subtitle-timing': { text: string; start: number; end: number };
|
||||
'time-pos-change': { time: number };
|
||||
'duration-change': { duration: number };
|
||||
'pause-change': { paused: boolean };
|
||||
'secondary-subtitle-change': { text: string };
|
||||
'subtitle-track-change': { sid: number | null };
|
||||
'subtitle-track-list-change': { trackList: unknown[] | null };
|
||||
'media-path-change': { path: string };
|
||||
'media-title-change': { title: string | null };
|
||||
'subtitle-metrics-change': { patch: Partial<MpvSubtitleRenderMetrics> };
|
||||
@@ -314,6 +317,9 @@ export class MpvIpcClient implements MpvClient {
|
||||
emitTimePosChange: (payload) => {
|
||||
this.emit('time-pos-change', payload);
|
||||
},
|
||||
emitDurationChange: (payload) => {
|
||||
this.emit('duration-change', payload);
|
||||
},
|
||||
emitPauseChange: (payload) => {
|
||||
this.playbackPaused = payload.paused;
|
||||
this.emit('pause-change', payload);
|
||||
@@ -321,6 +327,12 @@ export class MpvIpcClient implements MpvClient {
|
||||
emitSecondarySubtitleChange: (payload) => {
|
||||
this.emit('secondary-subtitle-change', payload);
|
||||
},
|
||||
emitSubtitleTrackChange: (payload) => {
|
||||
this.emit('subtitle-track-change', payload);
|
||||
},
|
||||
emitSubtitleTrackListChange: (payload) => {
|
||||
this.emit('subtitle-track-list-change', payload);
|
||||
},
|
||||
getCurrentSubText: () => this.currentSubText,
|
||||
setCurrentSubText: (text: string) => {
|
||||
this.currentSubText = text;
|
||||
|
||||
@@ -109,6 +109,60 @@ test('initializeOverlayRuntime starts Anki integration when ankiConnect.enabled
|
||||
assert.equal(setIntegrationCalls, 1);
|
||||
});
|
||||
|
||||
test('initializeOverlayRuntime can skip starting Anki integration transport', () => {
|
||||
let createdIntegrations = 0;
|
||||
let startedIntegrations = 0;
|
||||
let setIntegrationCalls = 0;
|
||||
|
||||
initializeOverlayRuntime({
|
||||
backendOverride: null,
|
||||
createMainWindow: () => {},
|
||||
registerGlobalShortcuts: () => {},
|
||||
updateVisibleOverlayBounds: () => {},
|
||||
isVisibleOverlayVisible: () => false,
|
||||
updateVisibleOverlayVisibility: () => {},
|
||||
getOverlayWindows: () => [],
|
||||
syncOverlayShortcuts: () => {},
|
||||
setWindowTracker: () => {},
|
||||
getMpvSocketPath: () => '/tmp/mpv.sock',
|
||||
createWindowTracker: () => null,
|
||||
getResolvedConfig: () => ({
|
||||
ankiConnect: { enabled: true } as never,
|
||||
}),
|
||||
getSubtitleTimingTracker: () => ({}),
|
||||
getMpvClient: () => ({
|
||||
send: () => {},
|
||||
}),
|
||||
getRuntimeOptionsManager: () => ({
|
||||
getEffectiveAnkiConnectConfig: (config) => config as never,
|
||||
}),
|
||||
createAnkiIntegration: () => {
|
||||
createdIntegrations += 1;
|
||||
return {
|
||||
start: () => {
|
||||
startedIntegrations += 1;
|
||||
},
|
||||
};
|
||||
},
|
||||
setAnkiIntegration: () => {
|
||||
setIntegrationCalls += 1;
|
||||
},
|
||||
showDesktopNotification: () => {},
|
||||
createFieldGroupingCallback: () => async () => ({
|
||||
keepNoteId: 7,
|
||||
deleteNoteId: 8,
|
||||
deleteDuplicate: false,
|
||||
cancelled: false,
|
||||
}),
|
||||
getKnownWordCacheStatePath: () => '/tmp/known-words-cache.json',
|
||||
shouldStartAnkiIntegration: () => false,
|
||||
});
|
||||
|
||||
assert.equal(createdIntegrations, 1);
|
||||
assert.equal(startedIntegrations, 0);
|
||||
assert.equal(setIntegrationCalls, 1);
|
||||
});
|
||||
|
||||
test('initializeOverlayRuntime merges shared ai config with Anki overrides', () => {
|
||||
initializeOverlayRuntime({
|
||||
backendOverride: null,
|
||||
@@ -213,3 +267,49 @@ test('initializeOverlayRuntime re-syncs overlay shortcuts when tracker focus cha
|
||||
tracker.onWindowFocusChange?.(true);
|
||||
assert.equal(syncCalls, 1);
|
||||
});
|
||||
|
||||
test('initializeOverlayRuntime refreshes visible overlay when tracker focus changes while overlay is shown', () => {
|
||||
let visibilityRefreshCalls = 0;
|
||||
const tracker = {
|
||||
onGeometryChange: null as ((...args: unknown[]) => void) | null,
|
||||
onWindowFound: null as ((...args: unknown[]) => void) | null,
|
||||
onWindowLost: null as (() => void) | null,
|
||||
onWindowFocusChange: null as ((focused: boolean) => void) | null,
|
||||
start: () => {},
|
||||
};
|
||||
|
||||
initializeOverlayRuntime({
|
||||
backendOverride: null,
|
||||
createMainWindow: () => {},
|
||||
registerGlobalShortcuts: () => {},
|
||||
updateVisibleOverlayBounds: () => {},
|
||||
isVisibleOverlayVisible: () => true,
|
||||
updateVisibleOverlayVisibility: () => {
|
||||
visibilityRefreshCalls += 1;
|
||||
},
|
||||
getOverlayWindows: () => [],
|
||||
syncOverlayShortcuts: () => {},
|
||||
setWindowTracker: () => {},
|
||||
getMpvSocketPath: () => '/tmp/mpv.sock',
|
||||
createWindowTracker: () => tracker as never,
|
||||
getResolvedConfig: () => ({
|
||||
ankiConnect: { enabled: false } as never,
|
||||
}),
|
||||
getSubtitleTimingTracker: () => null,
|
||||
getMpvClient: () => null,
|
||||
getRuntimeOptionsManager: () => null,
|
||||
setAnkiIntegration: () => {},
|
||||
showDesktopNotification: () => {},
|
||||
createFieldGroupingCallback: () => async () => ({
|
||||
keepNoteId: 1,
|
||||
deleteNoteId: 2,
|
||||
deleteDuplicate: false,
|
||||
cancelled: false,
|
||||
}),
|
||||
getKnownWordCacheStatePath: () => '/tmp/known-words-cache.json',
|
||||
});
|
||||
|
||||
tracker.onWindowFocusChange?.(true);
|
||||
|
||||
assert.equal(visibilityRefreshCalls, 2);
|
||||
});
|
||||
|
||||
@@ -75,6 +75,7 @@ export function initializeOverlayRuntime(options: {
|
||||
data: KikuFieldGroupingRequestData,
|
||||
) => Promise<KikuFieldGroupingChoice>;
|
||||
getKnownWordCacheStatePath: () => string;
|
||||
shouldStartAnkiIntegration?: () => boolean;
|
||||
createAnkiIntegration?: (args: CreateAnkiIntegrationArgs) => AnkiIntegrationLike;
|
||||
}): void {
|
||||
options.createMainWindow();
|
||||
@@ -90,9 +91,6 @@ export function initializeOverlayRuntime(options: {
|
||||
windowTracker.onGeometryChange = (geometry: WindowGeometry) => {
|
||||
options.updateVisibleOverlayBounds(geometry);
|
||||
};
|
||||
windowTracker.onTargetWindowFocusChange = () => {
|
||||
options.syncOverlayShortcuts();
|
||||
};
|
||||
windowTracker.onWindowFound = (geometry: WindowGeometry) => {
|
||||
options.updateVisibleOverlayBounds(geometry);
|
||||
if (options.isVisibleOverlayVisible()) {
|
||||
@@ -106,6 +104,9 @@ export function initializeOverlayRuntime(options: {
|
||||
options.syncOverlayShortcuts();
|
||||
};
|
||||
windowTracker.onWindowFocusChange = () => {
|
||||
if (options.isVisibleOverlayVisible()) {
|
||||
options.updateVisibleOverlayVisibility();
|
||||
}
|
||||
options.syncOverlayShortcuts();
|
||||
};
|
||||
windowTracker.start();
|
||||
@@ -135,7 +136,9 @@ export function initializeOverlayRuntime(options: {
|
||||
createFieldGroupingCallback: options.createFieldGroupingCallback,
|
||||
knownWordCacheStatePath: options.getKnownWordCacheStatePath(),
|
||||
});
|
||||
integration.start();
|
||||
if (options.shouldStartAnkiIntegration?.() !== false) {
|
||||
integration.start();
|
||||
}
|
||||
options.setAnkiIntegration(integration);
|
||||
}
|
||||
|
||||
|
||||
@@ -200,6 +200,81 @@ test('Windows visible overlay stays click-through and does not steal focus while
|
||||
assert.ok(!calls.includes('focus'));
|
||||
});
|
||||
|
||||
test('macOS tracked visible overlay stays visible without passively stealing focus', () => {
|
||||
const { window, calls } = createMainWindowRecorder();
|
||||
const tracker: WindowTrackerStub = {
|
||||
isTracking: () => true,
|
||||
getGeometry: () => ({ x: 0, y: 0, width: 1280, height: 720 }),
|
||||
};
|
||||
|
||||
updateVisibleOverlayVisibility({
|
||||
visibleOverlayVisible: true,
|
||||
mainWindow: window as never,
|
||||
windowTracker: tracker as never,
|
||||
trackerNotReadyWarningShown: false,
|
||||
setTrackerNotReadyWarningShown: () => {},
|
||||
updateVisibleOverlayBounds: () => {
|
||||
calls.push('update-bounds');
|
||||
},
|
||||
ensureOverlayWindowLevel: () => {
|
||||
calls.push('ensure-level');
|
||||
},
|
||||
syncPrimaryOverlayWindowLayer: () => {
|
||||
calls.push('sync-layer');
|
||||
},
|
||||
enforceOverlayLayerOrder: () => {
|
||||
calls.push('enforce-order');
|
||||
},
|
||||
syncOverlayShortcuts: () => {
|
||||
calls.push('sync-shortcuts');
|
||||
},
|
||||
isMacOSPlatform: true,
|
||||
isWindowsPlatform: false,
|
||||
} as never);
|
||||
|
||||
assert.ok(calls.includes('mouse-ignore:false:plain'));
|
||||
assert.ok(calls.includes('show'));
|
||||
assert.ok(!calls.includes('focus'));
|
||||
});
|
||||
|
||||
test('forced mouse passthrough keeps macOS tracked overlay passive while visible', () => {
|
||||
const { window, calls } = createMainWindowRecorder();
|
||||
const tracker: WindowTrackerStub = {
|
||||
isTracking: () => true,
|
||||
getGeometry: () => ({ x: 0, y: 0, width: 1280, height: 720 }),
|
||||
};
|
||||
|
||||
updateVisibleOverlayVisibility({
|
||||
visibleOverlayVisible: true,
|
||||
mainWindow: window as never,
|
||||
windowTracker: tracker as never,
|
||||
trackerNotReadyWarningShown: false,
|
||||
setTrackerNotReadyWarningShown: () => {},
|
||||
updateVisibleOverlayBounds: () => {
|
||||
calls.push('update-bounds');
|
||||
},
|
||||
ensureOverlayWindowLevel: () => {
|
||||
calls.push('ensure-level');
|
||||
},
|
||||
syncPrimaryOverlayWindowLayer: () => {
|
||||
calls.push('sync-layer');
|
||||
},
|
||||
enforceOverlayLayerOrder: () => {
|
||||
calls.push('enforce-order');
|
||||
},
|
||||
syncOverlayShortcuts: () => {
|
||||
calls.push('sync-shortcuts');
|
||||
},
|
||||
isMacOSPlatform: true,
|
||||
isWindowsPlatform: false,
|
||||
forceMousePassthrough: true,
|
||||
} as never);
|
||||
|
||||
assert.ok(calls.includes('mouse-ignore:true:forward'));
|
||||
assert.ok(calls.includes('show'));
|
||||
assert.ok(!calls.includes('focus'));
|
||||
});
|
||||
|
||||
test('Windows keeps visible overlay hidden while tracker is not ready', () => {
|
||||
const { window, calls } = createMainWindowRecorder();
|
||||
let trackerWarning = false;
|
||||
@@ -283,6 +358,59 @@ test('macOS keeps visible overlay hidden while tracker is not initialized yet',
|
||||
assert.ok(!calls.includes('update-bounds'));
|
||||
});
|
||||
|
||||
test('macOS suppresses immediate repeat loading OSD after tracker recovery until cooldown expires', () => {
|
||||
const { window } = createMainWindowRecorder();
|
||||
const osdMessages: string[] = [];
|
||||
let trackerWarning = false;
|
||||
let lastLoadingOsdAtMs: number | null = null;
|
||||
let nowMs = 1_000;
|
||||
const hiddenTracker: WindowTrackerStub = {
|
||||
isTracking: () => false,
|
||||
getGeometry: () => null,
|
||||
};
|
||||
const trackedTracker: WindowTrackerStub = {
|
||||
isTracking: () => true,
|
||||
getGeometry: () => ({ x: 0, y: 0, width: 1280, height: 720 }),
|
||||
};
|
||||
|
||||
const run = (windowTracker: WindowTrackerStub) =>
|
||||
updateVisibleOverlayVisibility({
|
||||
visibleOverlayVisible: true,
|
||||
mainWindow: window as never,
|
||||
windowTracker: windowTracker as never,
|
||||
trackerNotReadyWarningShown: trackerWarning,
|
||||
setTrackerNotReadyWarningShown: (shown: boolean) => {
|
||||
trackerWarning = shown;
|
||||
},
|
||||
updateVisibleOverlayBounds: () => {},
|
||||
ensureOverlayWindowLevel: () => {},
|
||||
syncPrimaryOverlayWindowLayer: () => {},
|
||||
enforceOverlayLayerOrder: () => {},
|
||||
syncOverlayShortcuts: () => {},
|
||||
isMacOSPlatform: true,
|
||||
showOverlayLoadingOsd: (message: string) => {
|
||||
osdMessages.push(message);
|
||||
},
|
||||
shouldShowOverlayLoadingOsd: () =>
|
||||
lastLoadingOsdAtMs === null || nowMs - lastLoadingOsdAtMs >= 5_000,
|
||||
markOverlayLoadingOsdShown: () => {
|
||||
lastLoadingOsdAtMs = nowMs;
|
||||
},
|
||||
} as never);
|
||||
|
||||
run(hiddenTracker);
|
||||
run(trackedTracker);
|
||||
|
||||
nowMs = 2_000;
|
||||
run(hiddenTracker);
|
||||
run(trackedTracker);
|
||||
|
||||
nowMs = 6_500;
|
||||
run(hiddenTracker);
|
||||
|
||||
assert.deepEqual(osdMessages, ['Overlay loading...', 'Overlay loading...']);
|
||||
});
|
||||
|
||||
test('setVisibleOverlayVisible does not mutate mpv subtitle visibility directly', () => {
|
||||
const calls: string[] = [];
|
||||
setVisibleOverlayVisible({
|
||||
@@ -298,10 +426,12 @@ test('setVisibleOverlayVisible does not mutate mpv subtitle visibility directly'
|
||||
assert.deepEqual(calls, ['state:true', 'update']);
|
||||
});
|
||||
|
||||
test('macOS loading OSD can show again after overlay is hidden and retried', () => {
|
||||
test('macOS explicit hide resets loading OSD suppression before retry', () => {
|
||||
const { window, calls } = createMainWindowRecorder();
|
||||
const osdMessages: string[] = [];
|
||||
let trackerWarning = false;
|
||||
let lastLoadingOsdAtMs: number | null = null;
|
||||
let nowMs = 1_000;
|
||||
|
||||
updateVisibleOverlayVisibility({
|
||||
visibleOverlayVisible: true,
|
||||
@@ -331,8 +461,17 @@ test('macOS loading OSD can show again after overlay is hidden and retried', ()
|
||||
showOverlayLoadingOsd: (message: string) => {
|
||||
osdMessages.push(message);
|
||||
},
|
||||
shouldShowOverlayLoadingOsd: () =>
|
||||
lastLoadingOsdAtMs === null || nowMs - lastLoadingOsdAtMs >= 5_000,
|
||||
markOverlayLoadingOsdShown: () => {
|
||||
lastLoadingOsdAtMs = nowMs;
|
||||
},
|
||||
resetOverlayLoadingOsdSuppression: () => {
|
||||
lastLoadingOsdAtMs = null;
|
||||
},
|
||||
} as never);
|
||||
|
||||
nowMs = 1_500;
|
||||
updateVisibleOverlayVisibility({
|
||||
visibleOverlayVisible: false,
|
||||
mainWindow: window as never,
|
||||
@@ -349,6 +488,9 @@ test('macOS loading OSD can show again after overlay is hidden and retried', ()
|
||||
syncOverlayShortcuts: () => {},
|
||||
isMacOSPlatform: true,
|
||||
showOverlayLoadingOsd: () => {},
|
||||
resetOverlayLoadingOsdSuppression: () => {
|
||||
lastLoadingOsdAtMs = null;
|
||||
},
|
||||
} as never);
|
||||
|
||||
updateVisibleOverlayVisibility({
|
||||
@@ -379,6 +521,14 @@ test('macOS loading OSD can show again after overlay is hidden and retried', ()
|
||||
showOverlayLoadingOsd: (message: string) => {
|
||||
osdMessages.push(message);
|
||||
},
|
||||
shouldShowOverlayLoadingOsd: () =>
|
||||
lastLoadingOsdAtMs === null || nowMs - lastLoadingOsdAtMs >= 5_000,
|
||||
markOverlayLoadingOsdShown: () => {
|
||||
lastLoadingOsdAtMs = nowMs;
|
||||
},
|
||||
resetOverlayLoadingOsdSuppression: () => {
|
||||
lastLoadingOsdAtMs = null;
|
||||
},
|
||||
} as never);
|
||||
|
||||
assert.deepEqual(osdMessages, ['Overlay loading...', 'Overlay loading...']);
|
||||
|
||||
@@ -4,6 +4,7 @@ import { WindowGeometry } from '../../types';
|
||||
|
||||
export function updateVisibleOverlayVisibility(args: {
|
||||
visibleOverlayVisible: boolean;
|
||||
forceMousePassthrough?: boolean;
|
||||
mainWindow: BrowserWindow | null;
|
||||
windowTracker: BaseWindowTracker | null;
|
||||
trackerNotReadyWarningShown: boolean;
|
||||
@@ -16,6 +17,9 @@ export function updateVisibleOverlayVisibility(args: {
|
||||
isMacOSPlatform?: boolean;
|
||||
isWindowsPlatform?: boolean;
|
||||
showOverlayLoadingOsd?: (message: string) => void;
|
||||
shouldShowOverlayLoadingOsd?: () => boolean;
|
||||
markOverlayLoadingOsdShown?: () => void;
|
||||
resetOverlayLoadingOsdSuppression?: () => void;
|
||||
resolveFallbackBounds?: () => WindowGeometry;
|
||||
}): void {
|
||||
if (!args.mainWindow || args.mainWindow.isDestroyed()) {
|
||||
@@ -25,20 +29,33 @@ export function updateVisibleOverlayVisibility(args: {
|
||||
const mainWindow = args.mainWindow;
|
||||
|
||||
const showPassiveVisibleOverlay = (): void => {
|
||||
if (args.isWindowsPlatform) {
|
||||
const forceMousePassthrough = args.forceMousePassthrough === true;
|
||||
if (args.isWindowsPlatform || forceMousePassthrough) {
|
||||
mainWindow.setIgnoreMouseEvents(true, { forward: true });
|
||||
} else {
|
||||
mainWindow.setIgnoreMouseEvents(false);
|
||||
}
|
||||
args.ensureOverlayWindowLevel(mainWindow);
|
||||
mainWindow.show();
|
||||
if (!args.isWindowsPlatform) {
|
||||
if (!args.isWindowsPlatform && !args.isMacOSPlatform && !forceMousePassthrough) {
|
||||
mainWindow.focus();
|
||||
}
|
||||
};
|
||||
|
||||
const maybeShowOverlayLoadingOsd = (): void => {
|
||||
if (!args.isMacOSPlatform || !args.showOverlayLoadingOsd) {
|
||||
return;
|
||||
}
|
||||
if (args.shouldShowOverlayLoadingOsd && !args.shouldShowOverlayLoadingOsd()) {
|
||||
return;
|
||||
}
|
||||
args.showOverlayLoadingOsd('Overlay loading...');
|
||||
args.markOverlayLoadingOsdShown?.();
|
||||
};
|
||||
|
||||
if (!args.visibleOverlayVisible) {
|
||||
args.setTrackerNotReadyWarningShown(false);
|
||||
args.resetOverlayLoadingOsdSuppression?.();
|
||||
mainWindow.hide();
|
||||
args.syncOverlayShortcuts();
|
||||
return;
|
||||
@@ -61,9 +78,7 @@ export function updateVisibleOverlayVisibility(args: {
|
||||
if (args.isMacOSPlatform || args.isWindowsPlatform) {
|
||||
if (!args.trackerNotReadyWarningShown) {
|
||||
args.setTrackerNotReadyWarningShown(true);
|
||||
if (args.isMacOSPlatform) {
|
||||
args.showOverlayLoadingOsd?.('Overlay loading...');
|
||||
}
|
||||
maybeShowOverlayLoadingOsd();
|
||||
}
|
||||
mainWindow.hide();
|
||||
args.syncOverlayShortcuts();
|
||||
@@ -79,9 +94,7 @@ export function updateVisibleOverlayVisibility(args: {
|
||||
|
||||
if (!args.trackerNotReadyWarningShown) {
|
||||
args.setTrackerNotReadyWarningShown(true);
|
||||
if (args.isMacOSPlatform) {
|
||||
args.showOverlayLoadingOsd?.('Overlay loading...');
|
||||
}
|
||||
maybeShowOverlayLoadingOsd();
|
||||
}
|
||||
|
||||
mainWindow.hide();
|
||||
|
||||
@@ -46,6 +46,7 @@ export function ensureOverlayWindowLevel(window: BrowserWindow): void {
|
||||
window.setAlwaysOnTop(true, 'screen-saver', 1);
|
||||
window.setVisibleOnAllWorkspaces(true, { visibleOnFullScreen: true });
|
||||
window.setFullScreenable(false);
|
||||
window.moveTop();
|
||||
return;
|
||||
}
|
||||
if (process.platform === 'win32') {
|
||||
|
||||
@@ -34,6 +34,7 @@ function makeArgs(overrides: Partial<CliArgs> = {}): CliArgs {
|
||||
anilistSetup: false,
|
||||
anilistRetryQueue: false,
|
||||
dictionary: false,
|
||||
stats: false,
|
||||
jellyfin: false,
|
||||
jellyfinLogin: false,
|
||||
jellyfinLogout: false,
|
||||
|
||||
196
src/core/services/startup.test.ts
Normal file
196
src/core/services/startup.test.ts
Normal file
@@ -0,0 +1,196 @@
|
||||
import assert from 'node:assert/strict';
|
||||
import test from 'node:test';
|
||||
import { runAppReadyRuntime } from './startup';
|
||||
|
||||
test('runAppReadyRuntime minimal startup skips Yomitan and first-run setup while still handling CLI args', async () => {
|
||||
const calls: string[] = [];
|
||||
|
||||
await runAppReadyRuntime({
|
||||
ensureDefaultConfigBootstrap: () => {
|
||||
calls.push('bootstrap');
|
||||
},
|
||||
loadSubtitlePosition: () => {
|
||||
calls.push('load-subtitle-position');
|
||||
},
|
||||
resolveKeybindings: () => {
|
||||
calls.push('resolve-keybindings');
|
||||
},
|
||||
createMpvClient: () => {
|
||||
calls.push('create-mpv');
|
||||
},
|
||||
reloadConfig: () => {
|
||||
calls.push('reload-config');
|
||||
},
|
||||
getResolvedConfig: () => ({}),
|
||||
getConfigWarnings: () => [],
|
||||
logConfigWarning: () => {
|
||||
calls.push('config-warning');
|
||||
},
|
||||
setLogLevel: () => {
|
||||
calls.push('set-log-level');
|
||||
},
|
||||
initRuntimeOptionsManager: () => {
|
||||
calls.push('init-runtime-options');
|
||||
},
|
||||
setSecondarySubMode: () => {
|
||||
calls.push('set-secondary-sub-mode');
|
||||
},
|
||||
defaultSecondarySubMode: 'hover',
|
||||
defaultWebsocketPort: 0,
|
||||
defaultAnnotationWebsocketPort: 0,
|
||||
defaultTexthookerPort: 0,
|
||||
hasMpvWebsocketPlugin: () => false,
|
||||
startSubtitleWebsocket: () => {
|
||||
calls.push('subtitle-ws');
|
||||
},
|
||||
startAnnotationWebsocket: () => {
|
||||
calls.push('annotation-ws');
|
||||
},
|
||||
startTexthooker: () => {
|
||||
calls.push('texthooker');
|
||||
},
|
||||
log: () => {
|
||||
calls.push('log');
|
||||
},
|
||||
createMecabTokenizerAndCheck: async () => {
|
||||
calls.push('mecab');
|
||||
},
|
||||
createSubtitleTimingTracker: () => {
|
||||
calls.push('subtitle-timing');
|
||||
},
|
||||
createImmersionTracker: () => {
|
||||
calls.push('immersion');
|
||||
},
|
||||
startJellyfinRemoteSession: async () => {
|
||||
calls.push('jellyfin');
|
||||
},
|
||||
loadYomitanExtension: async () => {
|
||||
calls.push('load-yomitan');
|
||||
},
|
||||
handleFirstRunSetup: async () => {
|
||||
calls.push('first-run');
|
||||
},
|
||||
prewarmSubtitleDictionaries: async () => {
|
||||
calls.push('prewarm');
|
||||
},
|
||||
startBackgroundWarmups: () => {
|
||||
calls.push('warmups');
|
||||
},
|
||||
texthookerOnlyMode: false,
|
||||
shouldAutoInitializeOverlayRuntimeFromConfig: () => false,
|
||||
setVisibleOverlayVisible: () => {
|
||||
calls.push('visible-overlay');
|
||||
},
|
||||
initializeOverlayRuntime: () => {
|
||||
calls.push('init-overlay');
|
||||
},
|
||||
handleInitialArgs: () => {
|
||||
calls.push('handle-initial-args');
|
||||
},
|
||||
shouldUseMinimalStartup: () => true,
|
||||
shouldSkipHeavyStartup: () => false,
|
||||
});
|
||||
|
||||
assert.deepEqual(calls, ['bootstrap', 'reload-config', 'handle-initial-args']);
|
||||
});
|
||||
|
||||
test('runAppReadyRuntime headless refresh bootstraps Anki runtime without UI startup', async () => {
|
||||
const calls: string[] = [];
|
||||
|
||||
await runAppReadyRuntime({
|
||||
ensureDefaultConfigBootstrap: () => {
|
||||
calls.push('bootstrap');
|
||||
},
|
||||
loadSubtitlePosition: () => {
|
||||
calls.push('load-subtitle-position');
|
||||
},
|
||||
resolveKeybindings: () => {
|
||||
calls.push('resolve-keybindings');
|
||||
},
|
||||
createMpvClient: () => {
|
||||
calls.push('create-mpv');
|
||||
},
|
||||
reloadConfig: () => {
|
||||
calls.push('reload-config');
|
||||
},
|
||||
getResolvedConfig: () => ({}),
|
||||
getConfigWarnings: () => [],
|
||||
logConfigWarning: () => {
|
||||
calls.push('config-warning');
|
||||
},
|
||||
setLogLevel: () => {
|
||||
calls.push('set-log-level');
|
||||
},
|
||||
initRuntimeOptionsManager: () => {
|
||||
calls.push('init-runtime-options');
|
||||
},
|
||||
setSecondarySubMode: () => {
|
||||
calls.push('set-secondary-sub-mode');
|
||||
},
|
||||
defaultSecondarySubMode: 'hover',
|
||||
defaultWebsocketPort: 0,
|
||||
defaultAnnotationWebsocketPort: 0,
|
||||
defaultTexthookerPort: 0,
|
||||
hasMpvWebsocketPlugin: () => false,
|
||||
startSubtitleWebsocket: () => {
|
||||
calls.push('subtitle-ws');
|
||||
},
|
||||
startAnnotationWebsocket: () => {
|
||||
calls.push('annotation-ws');
|
||||
},
|
||||
startTexthooker: () => {
|
||||
calls.push('texthooker');
|
||||
},
|
||||
log: () => {
|
||||
calls.push('log');
|
||||
},
|
||||
createMecabTokenizerAndCheck: async () => {
|
||||
calls.push('mecab');
|
||||
},
|
||||
createSubtitleTimingTracker: () => {
|
||||
calls.push('subtitle-timing');
|
||||
},
|
||||
createImmersionTracker: () => {
|
||||
calls.push('immersion');
|
||||
},
|
||||
startJellyfinRemoteSession: async () => {
|
||||
calls.push('jellyfin');
|
||||
},
|
||||
loadYomitanExtension: async () => {
|
||||
calls.push('load-yomitan');
|
||||
},
|
||||
handleFirstRunSetup: async () => {
|
||||
calls.push('first-run');
|
||||
},
|
||||
prewarmSubtitleDictionaries: async () => {
|
||||
calls.push('prewarm');
|
||||
},
|
||||
startBackgroundWarmups: () => {
|
||||
calls.push('warmups');
|
||||
},
|
||||
texthookerOnlyMode: false,
|
||||
shouldAutoInitializeOverlayRuntimeFromConfig: () => false,
|
||||
setVisibleOverlayVisible: () => {
|
||||
calls.push('visible-overlay');
|
||||
},
|
||||
initializeOverlayRuntime: () => {
|
||||
calls.push('init-overlay');
|
||||
},
|
||||
runHeadlessInitialCommand: async () => {
|
||||
calls.push('run-headless-command');
|
||||
},
|
||||
handleInitialArgs: () => {
|
||||
calls.push('handle-initial-args');
|
||||
},
|
||||
shouldRunHeadlessInitialCommand: () => true,
|
||||
shouldUseMinimalStartup: () => false,
|
||||
shouldSkipHeavyStartup: () => false,
|
||||
});
|
||||
|
||||
assert.deepEqual(calls, [
|
||||
'bootstrap',
|
||||
'reload-config',
|
||||
'init-runtime-options',
|
||||
'run-headless-command',
|
||||
]);
|
||||
});
|
||||
@@ -131,10 +131,13 @@ export interface AppReadyRuntimeDeps {
|
||||
shouldAutoInitializeOverlayRuntimeFromConfig: () => boolean;
|
||||
setVisibleOverlayVisible: (visible: boolean) => void;
|
||||
initializeOverlayRuntime: () => void;
|
||||
runHeadlessInitialCommand?: () => Promise<void>;
|
||||
handleInitialArgs: () => void;
|
||||
logDebug?: (message: string) => void;
|
||||
onCriticalConfigErrors?: (errors: string[]) => void;
|
||||
now?: () => number;
|
||||
shouldRunHeadlessInitialCommand?: () => boolean;
|
||||
shouldUseMinimalStartup?: () => boolean;
|
||||
shouldSkipHeavyStartup?: () => boolean;
|
||||
}
|
||||
|
||||
@@ -183,6 +186,32 @@ export async function runAppReadyRuntime(deps: AppReadyRuntimeDeps): Promise<voi
|
||||
const now = deps.now ?? (() => Date.now());
|
||||
const startupStartedAtMs = now();
|
||||
deps.ensureDefaultConfigBootstrap();
|
||||
if (deps.shouldRunHeadlessInitialCommand?.()) {
|
||||
deps.reloadConfig();
|
||||
deps.initRuntimeOptionsManager();
|
||||
if (deps.runHeadlessInitialCommand) {
|
||||
await deps.runHeadlessInitialCommand();
|
||||
} else {
|
||||
deps.createMpvClient();
|
||||
deps.createSubtitleTimingTracker();
|
||||
deps.initializeOverlayRuntime();
|
||||
deps.handleInitialArgs();
|
||||
}
|
||||
return;
|
||||
}
|
||||
|
||||
if (deps.texthookerOnlyMode) {
|
||||
deps.reloadConfig();
|
||||
deps.handleInitialArgs();
|
||||
return;
|
||||
}
|
||||
|
||||
if (deps.shouldUseMinimalStartup?.()) {
|
||||
deps.reloadConfig();
|
||||
deps.handleInitialArgs();
|
||||
return;
|
||||
}
|
||||
|
||||
if (deps.shouldSkipHeavyStartup?.()) {
|
||||
await deps.loadYomitanExtension();
|
||||
deps.reloadConfig();
|
||||
|
||||
1015
src/core/services/stats-server.ts
Normal file
1015
src/core/services/stats-server.ts
Normal file
File diff suppressed because it is too large
Load Diff
88
src/core/services/stats-window-runtime.ts
Normal file
88
src/core/services/stats-window-runtime.ts
Normal file
@@ -0,0 +1,88 @@
|
||||
import type { BrowserWindow, BrowserWindowConstructorOptions } from 'electron';
|
||||
import type { WindowGeometry } from '../../types';
|
||||
|
||||
// Fallback dashboard size used when no tracked overlay/mpv bounds are available.
const DEFAULT_STATS_WINDOW_WIDTH = 900;
const DEFAULT_STATS_WINDOW_HEIGHT = 700;

// Minimal window surface needed to raise the stats window above the overlay.
// The workspace/fullscreen setters are optional because they are only invoked
// on macOS (see promoteStatsWindowLevel), so test doubles may omit them.
type StatsWindowLevelController = Pick<BrowserWindow, 'setAlwaysOnTop' | 'moveTop'> &
  Partial<Pick<BrowserWindow, 'setVisibleOnAllWorkspaces' | 'setFullScreenable'>>;
|
||||
|
||||
function isBareToggleKeyInput(input: Electron.Input, toggleKey: string): boolean {
|
||||
return (
|
||||
input.type === 'keyDown' &&
|
||||
input.code === toggleKey &&
|
||||
!input.control &&
|
||||
!input.alt &&
|
||||
!input.meta &&
|
||||
!input.shift &&
|
||||
!input.isAutoRepeat
|
||||
);
|
||||
}
|
||||
|
||||
export function shouldHideStatsWindowForInput(input: Electron.Input, toggleKey: string): boolean {
|
||||
return (
|
||||
(input.type === 'keyDown' && input.key === 'Escape') || isBareToggleKeyInput(input, toggleKey)
|
||||
);
|
||||
}
|
||||
|
||||
export function buildStatsWindowOptions(options: {
|
||||
preloadPath: string;
|
||||
bounds?: WindowGeometry | null;
|
||||
}): BrowserWindowConstructorOptions {
|
||||
return {
|
||||
x: options.bounds?.x,
|
||||
y: options.bounds?.y,
|
||||
width: options.bounds?.width ?? DEFAULT_STATS_WINDOW_WIDTH,
|
||||
height: options.bounds?.height ?? DEFAULT_STATS_WINDOW_HEIGHT,
|
||||
frame: false,
|
||||
transparent: true,
|
||||
alwaysOnTop: true,
|
||||
resizable: false,
|
||||
skipTaskbar: true,
|
||||
hasShadow: false,
|
||||
focusable: true,
|
||||
acceptFirstMouse: true,
|
||||
fullscreenable: false,
|
||||
backgroundColor: '#1e1e2e',
|
||||
show: false,
|
||||
webPreferences: {
|
||||
nodeIntegration: false,
|
||||
contextIsolation: true,
|
||||
preload: options.preloadPath,
|
||||
sandbox: true,
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
export function promoteStatsWindowLevel(
|
||||
window: StatsWindowLevelController,
|
||||
platform: NodeJS.Platform = process.platform,
|
||||
): void {
|
||||
if (platform === 'darwin') {
|
||||
window.setAlwaysOnTop(true, 'screen-saver', 2);
|
||||
window.setVisibleOnAllWorkspaces?.(true, { visibleOnFullScreen: true });
|
||||
window.setFullScreenable?.(false);
|
||||
window.moveTop();
|
||||
return;
|
||||
}
|
||||
|
||||
if (platform === 'win32') {
|
||||
window.setAlwaysOnTop(true, 'screen-saver', 2);
|
||||
window.moveTop();
|
||||
return;
|
||||
}
|
||||
|
||||
window.setAlwaysOnTop(true);
|
||||
window.moveTop();
|
||||
}
|
||||
|
||||
export function buildStatsWindowLoadFileOptions(apiBaseUrl?: string): {
|
||||
query: Record<string, string>;
|
||||
} {
|
||||
return {
|
||||
query: {
|
||||
overlay: '1',
|
||||
...(apiBaseUrl ? { apiBase: apiBaseUrl } : {}),
|
||||
},
|
||||
};
|
||||
}
|
||||
202
src/core/services/stats-window.test.ts
Normal file
202
src/core/services/stats-window.test.ts
Normal file
@@ -0,0 +1,202 @@
|
||||
import assert from 'node:assert/strict';
|
||||
import test from 'node:test';
|
||||
import {
|
||||
buildStatsWindowLoadFileOptions,
|
||||
buildStatsWindowOptions,
|
||||
promoteStatsWindowLevel,
|
||||
shouldHideStatsWindowForInput,
|
||||
} from './stats-window-runtime';
|
||||
|
||||
test('buildStatsWindowOptions uses tracked overlay bounds and preload-friendly web preferences', () => {
|
||||
const options = buildStatsWindowOptions({
|
||||
preloadPath: '/tmp/preload-stats.js',
|
||||
bounds: {
|
||||
x: 120,
|
||||
y: 80,
|
||||
width: 1440,
|
||||
height: 900,
|
||||
},
|
||||
});
|
||||
|
||||
assert.equal(options.x, 120);
|
||||
assert.equal(options.y, 80);
|
||||
assert.equal(options.width, 1440);
|
||||
assert.equal(options.height, 900);
|
||||
assert.equal(options.frame, false);
|
||||
assert.equal(options.transparent, true);
|
||||
assert.equal(options.resizable, false);
|
||||
assert.equal(options.webPreferences?.preload, '/tmp/preload-stats.js');
|
||||
assert.equal(options.webPreferences?.contextIsolation, true);
|
||||
assert.equal(options.webPreferences?.nodeIntegration, false);
|
||||
assert.equal(options.webPreferences?.sandbox, true);
|
||||
});
|
||||
|
||||
test('shouldHideStatsWindowForInput matches Escape and configured bare toggle key', () => {
|
||||
assert.equal(
|
||||
shouldHideStatsWindowForInput(
|
||||
{
|
||||
type: 'keyDown',
|
||||
key: 'Escape',
|
||||
code: 'Escape',
|
||||
} as Electron.Input,
|
||||
'Backquote',
|
||||
),
|
||||
true,
|
||||
);
|
||||
|
||||
assert.equal(
|
||||
shouldHideStatsWindowForInput(
|
||||
{
|
||||
type: 'keyDown',
|
||||
key: '`',
|
||||
code: 'Backquote',
|
||||
} as Electron.Input,
|
||||
'Backquote',
|
||||
),
|
||||
true,
|
||||
);
|
||||
|
||||
assert.equal(
|
||||
shouldHideStatsWindowForInput(
|
||||
{
|
||||
type: 'keyDown',
|
||||
key: '`',
|
||||
code: 'Backquote',
|
||||
control: true,
|
||||
} as Electron.Input,
|
||||
'Backquote',
|
||||
),
|
||||
false,
|
||||
);
|
||||
|
||||
assert.equal(
|
||||
shouldHideStatsWindowForInput(
|
||||
{
|
||||
type: 'keyDown',
|
||||
key: '`',
|
||||
code: 'Backquote',
|
||||
alt: true,
|
||||
} as Electron.Input,
|
||||
'Backquote',
|
||||
),
|
||||
false,
|
||||
);
|
||||
|
||||
assert.equal(
|
||||
shouldHideStatsWindowForInput(
|
||||
{
|
||||
type: 'keyDown',
|
||||
key: '`',
|
||||
code: 'Backquote',
|
||||
meta: true,
|
||||
} as Electron.Input,
|
||||
'Backquote',
|
||||
),
|
||||
false,
|
||||
);
|
||||
|
||||
assert.equal(
|
||||
shouldHideStatsWindowForInput(
|
||||
{
|
||||
type: 'keyDown',
|
||||
key: '`',
|
||||
code: 'Backquote',
|
||||
isAutoRepeat: true,
|
||||
} as Electron.Input,
|
||||
'Backquote',
|
||||
),
|
||||
false,
|
||||
);
|
||||
|
||||
assert.equal(
|
||||
shouldHideStatsWindowForInput(
|
||||
{
|
||||
type: 'keyDown',
|
||||
key: '`',
|
||||
code: 'Backquote',
|
||||
shift: true,
|
||||
} as Electron.Input,
|
||||
'Backquote',
|
||||
),
|
||||
false,
|
||||
);
|
||||
|
||||
assert.equal(
|
||||
shouldHideStatsWindowForInput(
|
||||
{
|
||||
type: 'keyUp',
|
||||
key: '`',
|
||||
code: 'Backquote',
|
||||
} as Electron.Input,
|
||||
'Backquote',
|
||||
),
|
||||
false,
|
||||
);
|
||||
});
|
||||
|
||||
test('buildStatsWindowLoadFileOptions enables overlay rendering mode', () => {
|
||||
assert.deepEqual(buildStatsWindowLoadFileOptions(), {
|
||||
query: {
|
||||
overlay: '1',
|
||||
},
|
||||
});
|
||||
});
|
||||
|
||||
test('buildStatsWindowLoadFileOptions includes provided stats API base URL', () => {
|
||||
assert.deepEqual(buildStatsWindowLoadFileOptions('http://127.0.0.1:6123'), {
|
||||
query: {
|
||||
overlay: '1',
|
||||
apiBase: 'http://127.0.0.1:6123',
|
||||
},
|
||||
});
|
||||
});
|
||||
|
||||
test('promoteStatsWindowLevel raises stats above overlay level on macOS', () => {
|
||||
const calls: string[] = [];
|
||||
promoteStatsWindowLevel(
|
||||
{
|
||||
setAlwaysOnTop: (flag: boolean, level?: string, relativeLevel?: number) => {
|
||||
calls.push(`always-on-top:${flag}:${level ?? 'none'}:${relativeLevel ?? 0}`);
|
||||
},
|
||||
setVisibleOnAllWorkspaces: (
|
||||
visible: boolean,
|
||||
options?: { visibleOnFullScreen?: boolean },
|
||||
) => {
|
||||
calls.push(
|
||||
`all-workspaces:${visible}:${options?.visibleOnFullScreen === true ? 'fullscreen' : 'plain'}`,
|
||||
);
|
||||
},
|
||||
setFullScreenable: (fullscreenable: boolean) => {
|
||||
calls.push(`fullscreenable:${fullscreenable}`);
|
||||
},
|
||||
moveTop: () => {
|
||||
calls.push('move-top');
|
||||
},
|
||||
} as never,
|
||||
'darwin',
|
||||
);
|
||||
|
||||
assert.deepEqual(calls, [
|
||||
'always-on-top:true:screen-saver:2',
|
||||
'all-workspaces:true:fullscreen',
|
||||
'fullscreenable:false',
|
||||
'move-top',
|
||||
]);
|
||||
});
|
||||
|
||||
test('promoteStatsWindowLevel raises stats above overlay level on Windows', () => {
|
||||
const calls: string[] = [];
|
||||
promoteStatsWindowLevel(
|
||||
{
|
||||
setAlwaysOnTop: (flag: boolean, level?: string, relativeLevel?: number) => {
|
||||
calls.push(`always-on-top:${flag}:${level ?? 'none'}:${relativeLevel ?? 0}`);
|
||||
},
|
||||
moveTop: () => {
|
||||
calls.push('move-top');
|
||||
},
|
||||
} as never,
|
||||
'win32',
|
||||
);
|
||||
|
||||
assert.deepEqual(calls, ['always-on-top:true:screen-saver:2', 'move-top']);
|
||||
});
|
||||
118
src/core/services/stats-window.ts
Normal file
118
src/core/services/stats-window.ts
Normal file
@@ -0,0 +1,118 @@
|
||||
import { BrowserWindow, ipcMain } from 'electron';
|
||||
import * as path from 'path';
|
||||
import type { WindowGeometry } from '../../types.js';
|
||||
import { IPC_CHANNELS } from '../../shared/ipc/contracts.js';
|
||||
import {
|
||||
buildStatsWindowLoadFileOptions,
|
||||
buildStatsWindowOptions,
|
||||
promoteStatsWindowLevel,
|
||||
shouldHideStatsWindowForInput,
|
||||
} from './stats-window-runtime.js';
|
||||
|
||||
// Module-level singleton: the stats window is created once and then
// shown/hidden so the React app stays mounted across toggles.
let statsWindow: BrowserWindow | null = null;
// Guards registerStatsOverlayToggle() against installing the IPC handler twice.
let toggleRegistered = false;
|
||||
|
||||
export interface StatsWindowOptions {
|
||||
/** Absolute path to stats/dist/ directory */
|
||||
staticDir: string;
|
||||
/** Absolute path to the compiled preload-stats.js */
|
||||
preloadPath: string;
|
||||
/** Resolve the active stats API base URL */
|
||||
getApiBaseUrl?: () => string;
|
||||
/** Resolve the active stats toggle key from config */
|
||||
getToggleKey: () => string;
|
||||
/** Resolve the tracked overlay/mpv bounds */
|
||||
resolveBounds: () => WindowGeometry | null;
|
||||
/** Notify the main process when the stats overlay becomes visible/hidden */
|
||||
onVisibilityChanged?: (visible: boolean) => void;
|
||||
}
|
||||
|
||||
function syncStatsWindowBounds(window: BrowserWindow, bounds: WindowGeometry | null): void {
|
||||
if (!bounds || window.isDestroyed()) return;
|
||||
window.setBounds({
|
||||
x: bounds.x,
|
||||
y: bounds.y,
|
||||
width: bounds.width,
|
||||
height: bounds.height,
|
||||
});
|
||||
}
|
||||
|
||||
// Re-apply tracked bounds, raise the window above the overlay, surface and
// focus it, then notify the host that the stats overlay became visible.
function showStatsWindow(window: BrowserWindow, options: StatsWindowOptions): void {
  syncStatsWindowBounds(window, options.resolveBounds());
  promoteStatsWindowLevel(window);
  window.show();
  window.focus();
  options.onVisibilityChanged?.(true);
  // Promoted a second time after show() — NOTE(review): presumably because
  // showing/focusing can reset the window level on some platforms; confirm.
  promoteStatsWindowLevel(window);
}
|
||||
|
||||
/**
 * Toggle the stats overlay window.
 *
 * First call lazily creates the BrowserWindow, loads the stats renderer, and
 * wires its lifecycle handlers; subsequent calls only show/hide the existing
 * window, so the React app stays mounted and its state is preserved.
 */
export function toggleStatsOverlay(options: StatsWindowOptions): void {
  if (!statsWindow) {
    statsWindow = new BrowserWindow(
      buildStatsWindowOptions({
        preloadPath: options.preloadPath,
        bounds: options.resolveBounds(),
      }),
    );

    // Load the built stats dashboard, passing overlay mode and the API base
    // URL (if any) through the query string.
    const indexPath = path.join(options.staticDir, 'index.html');
    statsWindow.loadFile(indexPath, buildStatsWindowLoadFileOptions(options.getApiBaseUrl?.()));

    // Drop the singleton when the window is closed so the next toggle recreates it.
    statsWindow.on('closed', () => {
      options.onVisibilityChanged?.(false);
      statsWindow = null;
    });

    // Escape or a bare press of the configured toggle key hides the window
    // without destroying it (state preserved for the next toggle).
    statsWindow.webContents.on('before-input-event', (event, input) => {
      if (shouldHideStatsWindowForInput(input, options.getToggleKey())) {
        event.preventDefault();
        statsWindow?.hide();
        options.onVisibilityChanged?.(false);
      }
    });

    // Window is created with show: false; surface it once the renderer is ready.
    statsWindow.once('ready-to-show', () => {
      if (!statsWindow) return;
      showStatsWindow(statsWindow, options);
    });

    // Losing focus can drop the window below the overlay; re-promote while visible.
    statsWindow.on('blur', () => {
      if (!statsWindow || statsWindow.isDestroyed() || !statsWindow.isVisible()) {
        return;
      }
      promoteStatsWindowLevel(statsWindow);
    });
  } else if (statsWindow.isVisible()) {
    statsWindow.hide();
    options.onVisibilityChanged?.(false);
  } else {
    showStatsWindow(statsWindow, options);
  }
}
|
||||
|
||||
/**
|
||||
* Register the IPC command handler for toggling the overlay.
|
||||
* Call this once during app initialization.
|
||||
*/
|
||||
export function registerStatsOverlayToggle(options: StatsWindowOptions): void {
|
||||
if (toggleRegistered) return;
|
||||
toggleRegistered = true;
|
||||
ipcMain.on(IPC_CHANNELS.command.toggleStatsOverlay, () => {
|
||||
toggleStatsOverlay(options);
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Clean up — destroy the stats window if it exists.
|
||||
* Call during app quit.
|
||||
*/
|
||||
export function destroyStatsWindow(): void {
|
||||
if (statsWindow && !statsWindow.isDestroyed()) {
|
||||
statsWindow.destroy();
|
||||
statsWindow = null;
|
||||
}
|
||||
}
|
||||
245
src/core/services/subtitle-cue-parser.test.ts
Normal file
245
src/core/services/subtitle-cue-parser.test.ts
Normal file
@@ -0,0 +1,245 @@
|
||||
import assert from 'node:assert/strict';
|
||||
import test from 'node:test';
|
||||
import { parseSrtCues, parseAssCues, parseSubtitleCues } from './subtitle-cue-parser';
|
||||
import type { SubtitleCue } from './subtitle-cue-parser';
|
||||
|
||||
test('parseSrtCues parses basic SRT content', () => {
|
||||
const content = [
|
||||
'1',
|
||||
'00:00:01,000 --> 00:00:04,000',
|
||||
'こんにちは',
|
||||
'',
|
||||
'2',
|
||||
'00:00:05,000 --> 00:00:08,500',
|
||||
'元気ですか',
|
||||
'',
|
||||
].join('\n');
|
||||
|
||||
const cues = parseSrtCues(content);
|
||||
|
||||
assert.equal(cues.length, 2);
|
||||
assert.equal(cues[0]!.startTime, 1.0);
|
||||
assert.equal(cues[0]!.endTime, 4.0);
|
||||
assert.equal(cues[0]!.text, 'こんにちは');
|
||||
assert.equal(cues[1]!.startTime, 5.0);
|
||||
assert.equal(cues[1]!.endTime, 8.5);
|
||||
assert.equal(cues[1]!.text, '元気ですか');
|
||||
});
|
||||
|
||||
test('parseSrtCues handles multi-line subtitle text', () => {
|
||||
const content = ['1', '00:01:00,000 --> 00:01:05,000', 'これは', 'テストです', ''].join('\n');
|
||||
|
||||
const cues = parseSrtCues(content);
|
||||
|
||||
assert.equal(cues.length, 1);
|
||||
assert.equal(cues[0]!.text, 'これは\nテストです');
|
||||
});
|
||||
|
||||
test('parseSrtCues handles hours in timestamps', () => {
|
||||
const content = ['1', '01:30:00,000 --> 01:30:05,000', 'テスト', ''].join('\n');
|
||||
|
||||
const cues = parseSrtCues(content);
|
||||
|
||||
assert.equal(cues[0]!.startTime, 5400.0);
|
||||
assert.equal(cues[0]!.endTime, 5405.0);
|
||||
});
|
||||
|
||||
test('parseSrtCues handles VTT-style dot separator', () => {
|
||||
const content = ['1', '00:00:01.000 --> 00:00:04.000', 'VTTスタイル', ''].join('\n');
|
||||
|
||||
const cues = parseSrtCues(content);
|
||||
|
||||
assert.equal(cues.length, 1);
|
||||
assert.equal(cues[0]!.startTime, 1.0);
|
||||
});
|
||||
|
||||
test('parseSrtCues returns empty array for empty content', () => {
|
||||
assert.deepEqual(parseSrtCues(''), []);
|
||||
assert.deepEqual(parseSrtCues(' \n\n '), []);
|
||||
});
|
||||
|
||||
test('parseSrtCues skips malformed timing lines gracefully', () => {
|
||||
const content = [
|
||||
'1',
|
||||
'NOT A TIMING LINE',
|
||||
'テスト',
|
||||
'',
|
||||
'2',
|
||||
'00:00:01,000 --> 00:00:02,000',
|
||||
'有効',
|
||||
'',
|
||||
].join('\n');
|
||||
|
||||
const cues = parseSrtCues(content);
|
||||
|
||||
assert.equal(cues.length, 1);
|
||||
assert.equal(cues[0]!.text, '有効');
|
||||
});
|
||||
|
||||
test('parseAssCues parses basic ASS dialogue lines', () => {
|
||||
const content = [
|
||||
'[Script Info]',
|
||||
'Title: Test',
|
||||
'',
|
||||
'[Events]',
|
||||
'Format: Layer, Start, End, Style, Name, MarginL, MarginR, MarginV, Effect, Text',
|
||||
'Dialogue: 0,0:00:01.00,0:00:04.00,Default,,0,0,0,,こんにちは',
|
||||
'Dialogue: 0,0:00:05.00,0:00:08.50,Default,,0,0,0,,元気ですか',
|
||||
].join('\n');
|
||||
|
||||
const cues = parseAssCues(content);
|
||||
|
||||
assert.equal(cues.length, 2);
|
||||
assert.equal(cues[0]!.startTime, 1.0);
|
||||
assert.equal(cues[0]!.endTime, 4.0);
|
||||
assert.equal(cues[0]!.text, 'こんにちは');
|
||||
assert.equal(cues[1]!.startTime, 5.0);
|
||||
assert.equal(cues[1]!.endTime, 8.5);
|
||||
assert.equal(cues[1]!.text, '元気ですか');
|
||||
});
|
||||
|
||||
test('parseAssCues strips override tags from text', () => {
|
||||
const content = [
|
||||
'[Events]',
|
||||
'Format: Layer, Start, End, Style, Name, MarginL, MarginR, MarginV, Effect, Text',
|
||||
'Dialogue: 0,0:00:01.00,0:00:04.00,Default,,0,0,0,,{\\b1}太字{\\b0}テスト',
|
||||
].join('\n');
|
||||
|
||||
const cues = parseAssCues(content);
|
||||
|
||||
assert.equal(cues[0]!.text, '太字テスト');
|
||||
});
|
||||
|
||||
test('parseAssCues handles text containing commas', () => {
|
||||
const content = [
|
||||
'[Events]',
|
||||
'Format: Layer, Start, End, Style, Name, MarginL, MarginR, MarginV, Effect, Text',
|
||||
'Dialogue: 0,0:00:01.00,0:00:04.00,Default,,0,0,0,,はい、そうです、ね',
|
||||
].join('\n');
|
||||
|
||||
const cues = parseAssCues(content);
|
||||
|
||||
assert.equal(cues[0]!.text, 'はい、そうです、ね');
|
||||
});
|
||||
|
||||
test('parseAssCues handles \\N line breaks', () => {
|
||||
const content = [
|
||||
'[Events]',
|
||||
'Format: Layer, Start, End, Style, Name, MarginL, MarginR, MarginV, Effect, Text',
|
||||
'Dialogue: 0,0:00:01.00,0:00:04.00,Default,,0,0,0,,一行目\\N二行目',
|
||||
].join('\n');
|
||||
|
||||
const cues = parseAssCues(content);
|
||||
|
||||
assert.equal(cues[0]!.text, '一行目\\N二行目');
|
||||
});
|
||||
|
||||
test('parseAssCues returns empty for content without Events section', () => {
|
||||
const content = ['[Script Info]', 'Title: Test'].join('\n');
|
||||
|
||||
assert.deepEqual(parseAssCues(content), []);
|
||||
});
|
||||
|
||||
test('parseAssCues skips Comment lines', () => {
|
||||
const content = [
|
||||
'[Events]',
|
||||
'Format: Layer, Start, End, Style, Name, MarginL, MarginR, MarginV, Effect, Text',
|
||||
'Comment: 0,0:00:01.00,0:00:04.00,Default,,0,0,0,,これはコメント',
|
||||
'Dialogue: 0,0:00:05.00,0:00:08.00,Default,,0,0,0,,これは字幕',
|
||||
].join('\n');
|
||||
|
||||
const cues = parseAssCues(content);
|
||||
|
||||
assert.equal(cues.length, 1);
|
||||
assert.equal(cues[0]!.text, 'これは字幕');
|
||||
});
|
||||
|
||||
test('parseAssCues handles hour timestamps', () => {
|
||||
const content = [
|
||||
'[Events]',
|
||||
'Format: Layer, Start, End, Style, Name, MarginL, MarginR, MarginV, Effect, Text',
|
||||
'Dialogue: 0,1:30:00.00,1:30:05.00,Default,,0,0,0,,テスト',
|
||||
].join('\n');
|
||||
|
||||
const cues = parseAssCues(content);
|
||||
|
||||
assert.equal(cues[0]!.startTime, 5400.0);
|
||||
assert.equal(cues[0]!.endTime, 5405.0);
|
||||
});
|
||||
|
||||
test('parseAssCues respects dynamic field ordering from the Format row', () => {
|
||||
const content = [
|
||||
'[Events]',
|
||||
'Format: Layer, Style, Start, End, Name, MarginL, MarginR, MarginV, Effect, Text',
|
||||
'Dialogue: 0,Default,0:00:01.00,0:00:04.00,,0,0,0,,順番が違う',
|
||||
].join('\n');
|
||||
|
||||
const cues = parseAssCues(content);
|
||||
|
||||
assert.equal(cues.length, 1);
|
||||
assert.equal(cues[0]!.startTime, 1.0);
|
||||
assert.equal(cues[0]!.endTime, 4.0);
|
||||
assert.equal(cues[0]!.text, '順番が違う');
|
||||
});
|
||||
|
||||
test('parseSubtitleCues auto-detects SRT format', () => {
|
||||
const content = ['1', '00:00:01,000 --> 00:00:04,000', 'SRTテスト', ''].join('\n');
|
||||
|
||||
const cues = parseSubtitleCues(content, 'test.srt');
|
||||
assert.equal(cues.length, 1);
|
||||
assert.equal(cues[0]!.text, 'SRTテスト');
|
||||
});
|
||||
|
||||
test('parseSubtitleCues auto-detects ASS format', () => {
|
||||
const content = [
|
||||
'[Events]',
|
||||
'Format: Layer, Start, End, Style, Name, MarginL, MarginR, MarginV, Effect, Text',
|
||||
'Dialogue: 0,0:00:01.00,0:00:04.00,Default,,0,0,0,,ASSテスト',
|
||||
].join('\n');
|
||||
|
||||
const cues = parseSubtitleCues(content, 'test.ass');
|
||||
assert.equal(cues.length, 1);
|
||||
assert.equal(cues[0]!.text, 'ASSテスト');
|
||||
});
|
||||
|
||||
test('parseSubtitleCues auto-detects VTT format', () => {
|
||||
const content = ['1', '00:00:01.000 --> 00:00:04.000', 'VTTテスト', ''].join('\n');
|
||||
|
||||
const cues = parseSubtitleCues(content, 'test.vtt');
|
||||
assert.equal(cues.length, 1);
|
||||
assert.equal(cues[0]!.text, 'VTTテスト');
|
||||
});
|
||||
|
||||
test('parseSubtitleCues returns empty for unknown format', () => {
|
||||
assert.deepEqual(parseSubtitleCues('random content', 'test.xyz'), []);
|
||||
});
|
||||
|
||||
test('parseSubtitleCues returns cues sorted by start time', () => {
|
||||
const content = [
|
||||
'1',
|
||||
'00:00:10,000 --> 00:00:14,000',
|
||||
'二番目',
|
||||
'',
|
||||
'2',
|
||||
'00:00:01,000 --> 00:00:04,000',
|
||||
'一番目',
|
||||
'',
|
||||
].join('\n');
|
||||
|
||||
const cues = parseSubtitleCues(content, 'test.srt');
|
||||
assert.equal(cues[0]!.text, '一番目');
|
||||
assert.equal(cues[1]!.text, '二番目');
|
||||
});
|
||||
|
||||
test('parseSubtitleCues detects subtitle formats from remote URLs', () => {
|
||||
const assContent = [
|
||||
'[Events]',
|
||||
'Format: Layer, Start, End, Style, Name, MarginL, MarginR, MarginV, Effect, Text',
|
||||
'Dialogue: 0,0:00:01.00,0:00:02.00,Default,,0,0,0,,URLテスト',
|
||||
].join('\n');
|
||||
|
||||
const cues = parseSubtitleCues(assContent, 'https://host/subs.ass?lang=ja#track');
|
||||
|
||||
assert.equal(cues.length, 1);
|
||||
assert.equal(cues[0]!.text, 'URLテスト');
|
||||
});
|
||||
191
src/core/services/subtitle-cue-parser.ts
Normal file
191
src/core/services/subtitle-cue-parser.ts
Normal file
@@ -0,0 +1,191 @@
|
||||
/** A single parsed subtitle line with its display window. */
export interface SubtitleCue {
  /** Display start, in seconds from the beginning of the media. */
  startTime: number;
  /** Display end, in seconds from the beginning of the media. */
  endTime: number;
  /** Cue text; may contain embedded '\n' for multi-line cues. */
  text: string;
}
|
||||
|
||||
const SRT_TIMING_PATTERN =
|
||||
/^\s*(?:(\d{1,2}):)?(\d{2}):(\d{2})[,.](\d{1,3})\s*-->\s*(?:(\d{1,2}):)?(\d{2}):(\d{2})[,.](\d{1,3})/;
|
||||
|
||||
function parseTimestamp(
|
||||
hours: string | undefined,
|
||||
minutes: string,
|
||||
seconds: string,
|
||||
millis: string,
|
||||
): number {
|
||||
return (
|
||||
Number(hours || 0) * 3600 +
|
||||
Number(minutes) * 60 +
|
||||
Number(seconds) +
|
||||
Number(millis.padEnd(3, '0')) / 1000
|
||||
);
|
||||
}
|
||||
|
||||
export function parseSrtCues(content: string): SubtitleCue[] {
|
||||
const cues: SubtitleCue[] = [];
|
||||
const lines = content.split(/\r?\n/);
|
||||
let i = 0;
|
||||
|
||||
while (i < lines.length) {
|
||||
const line = lines[i]!;
|
||||
const timingMatch = SRT_TIMING_PATTERN.exec(line);
|
||||
if (!timingMatch) {
|
||||
i += 1;
|
||||
continue;
|
||||
}
|
||||
|
||||
const startTime = parseTimestamp(
|
||||
timingMatch[1],
|
||||
timingMatch[2]!,
|
||||
timingMatch[3]!,
|
||||
timingMatch[4]!,
|
||||
);
|
||||
const endTime = parseTimestamp(
|
||||
timingMatch[5],
|
||||
timingMatch[6]!,
|
||||
timingMatch[7]!,
|
||||
timingMatch[8]!,
|
||||
);
|
||||
|
||||
i += 1;
|
||||
const textLines: string[] = [];
|
||||
while (i < lines.length && lines[i]!.trim() !== '') {
|
||||
textLines.push(lines[i]!);
|
||||
i += 1;
|
||||
}
|
||||
|
||||
const text = textLines.join('\n').trim();
|
||||
if (text) {
|
||||
cues.push({ startTime, endTime, text });
|
||||
}
|
||||
}
|
||||
|
||||
return cues;
|
||||
}
|
||||
|
||||
const ASS_OVERRIDE_TAG_PATTERN = /\{[^}]*\}/g;
|
||||
|
||||
const ASS_TIMING_PATTERN = /^(\d+):(\d{2}):(\d{2})\.(\d{1,2})$/;
|
||||
const ASS_FORMAT_PREFIX = 'Format:';
|
||||
const ASS_DIALOGUE_PREFIX = 'Dialogue:';
|
||||
|
||||
function parseAssTimestamp(raw: string): number | null {
|
||||
const match = ASS_TIMING_PATTERN.exec(raw.trim());
|
||||
if (!match) {
|
||||
return null;
|
||||
}
|
||||
const hours = Number(match[1]);
|
||||
const minutes = Number(match[2]);
|
||||
const seconds = Number(match[3]);
|
||||
const centiseconds = Number(match[4]!.padEnd(2, '0'));
|
||||
return hours * 3600 + minutes * 60 + seconds + centiseconds / 100;
|
||||
}
|
||||
|
||||
export function parseAssCues(content: string): SubtitleCue[] {
|
||||
const cues: SubtitleCue[] = [];
|
||||
const lines = content.split(/\r?\n/);
|
||||
let inEventsSection = false;
|
||||
let startFieldIndex = -1;
|
||||
let endFieldIndex = -1;
|
||||
let textFieldIndex = -1;
|
||||
|
||||
for (const line of lines) {
|
||||
const trimmed = line.trim();
|
||||
|
||||
if (trimmed.startsWith('[') && trimmed.endsWith(']')) {
|
||||
inEventsSection = trimmed.toLowerCase() === '[events]';
|
||||
if (!inEventsSection) {
|
||||
startFieldIndex = -1;
|
||||
endFieldIndex = -1;
|
||||
textFieldIndex = -1;
|
||||
}
|
||||
continue;
|
||||
}
|
||||
|
||||
if (!inEventsSection) {
|
||||
continue;
|
||||
}
|
||||
|
||||
if (trimmed.startsWith(ASS_FORMAT_PREFIX)) {
|
||||
const formatFields = trimmed
|
||||
.slice(ASS_FORMAT_PREFIX.length)
|
||||
.split(',')
|
||||
.map((field) => field.trim().toLowerCase());
|
||||
startFieldIndex = formatFields.indexOf('start');
|
||||
endFieldIndex = formatFields.indexOf('end');
|
||||
textFieldIndex = formatFields.indexOf('text');
|
||||
continue;
|
||||
}
|
||||
|
||||
if (!trimmed.startsWith(ASS_DIALOGUE_PREFIX)) {
|
||||
continue;
|
||||
}
|
||||
|
||||
if (startFieldIndex < 0 || endFieldIndex < 0 || textFieldIndex < 0) {
|
||||
continue;
|
||||
}
|
||||
|
||||
const fields = trimmed.slice(ASS_DIALOGUE_PREFIX.length).split(',');
|
||||
if (
|
||||
startFieldIndex >= fields.length ||
|
||||
endFieldIndex >= fields.length ||
|
||||
textFieldIndex >= fields.length
|
||||
) {
|
||||
continue;
|
||||
}
|
||||
|
||||
const startTime = parseAssTimestamp(fields[startFieldIndex]!);
|
||||
const endTime = parseAssTimestamp(fields[endFieldIndex]!);
|
||||
if (startTime === null || endTime === null) {
|
||||
continue;
|
||||
}
|
||||
|
||||
const rawText = fields
|
||||
.slice(textFieldIndex)
|
||||
.join(',')
|
||||
.replace(ASS_OVERRIDE_TAG_PATTERN, '')
|
||||
.trim();
|
||||
if (rawText) {
|
||||
cues.push({ startTime, endTime, text: rawText });
|
||||
}
|
||||
}
|
||||
|
||||
return cues;
|
||||
}
|
||||
|
||||
function detectSubtitleFormat(source: string): 'srt' | 'vtt' | 'ass' | 'ssa' | null {
|
||||
const [normalizedSource = source] =
|
||||
(() => {
|
||||
try {
|
||||
return /^[a-z]+:\/\//i.test(source) ? new URL(source).pathname : source;
|
||||
} catch {
|
||||
return source;
|
||||
}
|
||||
})().split(/[?#]/, 1)[0] ?? '';
|
||||
const ext = normalizedSource.split('.').pop()?.toLowerCase() ?? '';
|
||||
if (ext === 'srt') return 'srt';
|
||||
if (ext === 'vtt') return 'vtt';
|
||||
if (ext === 'ass' || ext === 'ssa') return 'ass';
|
||||
return null;
|
||||
}
|
||||
|
||||
export function parseSubtitleCues(content: string, filename: string): SubtitleCue[] {
|
||||
const format = detectSubtitleFormat(filename);
|
||||
let cues: SubtitleCue[];
|
||||
|
||||
switch (format) {
|
||||
case 'srt':
|
||||
case 'vtt':
|
||||
cues = parseSrtCues(content);
|
||||
break;
|
||||
case 'ass':
|
||||
case 'ssa':
|
||||
cues = parseAssCues(content);
|
||||
break;
|
||||
default:
|
||||
return [];
|
||||
}
|
||||
|
||||
cues.sort((a, b) => a.startTime - b.startTime);
|
||||
return cues;
|
||||
}
|
||||
244
src/core/services/subtitle-prefetch.test.ts
Normal file
244
src/core/services/subtitle-prefetch.test.ts
Normal file
@@ -0,0 +1,244 @@
|
||||
import assert from 'node:assert/strict';
|
||||
import test from 'node:test';
|
||||
import { computePriorityWindow, createSubtitlePrefetchService } from './subtitle-prefetch';
|
||||
import type { SubtitleCue } from './subtitle-cue-parser';
|
||||
import type { SubtitleData } from '../../types';
|
||||
|
||||
function makeCues(count: number, startOffset = 0): SubtitleCue[] {
|
||||
return Array.from({ length: count }, (_, i) => ({
|
||||
startTime: startOffset + i * 5,
|
||||
endTime: startOffset + i * 5 + 4,
|
||||
text: `line-${i}`,
|
||||
}));
|
||||
}
|
||||
|
||||
test('computePriorityWindow returns next N cues from current position', () => {
|
||||
const cues = makeCues(20);
|
||||
const window = computePriorityWindow(cues, 12.0, 5);
|
||||
|
||||
assert.equal(window.length, 5);
|
||||
// Position 12.0 falls during cue 2, so the active cue should be warmed first.
|
||||
assert.equal(window[0]!.text, 'line-2');
|
||||
assert.equal(window[4]!.text, 'line-6');
|
||||
});
|
||||
|
||||
test('computePriorityWindow clamps to remaining cues at end of file', () => {
|
||||
const cues = makeCues(5);
|
||||
const window = computePriorityWindow(cues, 18.0, 10);
|
||||
|
||||
// Position 18.0 is during cue 3 (start=15), so cue 3 and cue 4 remain.
|
||||
assert.equal(window.length, 2);
|
||||
assert.equal(window[0]!.text, 'line-3');
|
||||
assert.equal(window[1]!.text, 'line-4');
|
||||
});
|
||||
|
||||
test('computePriorityWindow returns empty when past all cues', () => {
|
||||
const cues = makeCues(3);
|
||||
const window = computePriorityWindow(cues, 999.0, 10);
|
||||
assert.equal(window.length, 0);
|
||||
});
|
||||
|
||||
test('computePriorityWindow at position 0 returns first N cues', () => {
|
||||
const cues = makeCues(20);
|
||||
const window = computePriorityWindow(cues, 0, 5);
|
||||
|
||||
assert.equal(window.length, 5);
|
||||
assert.equal(window[0]!.text, 'line-0');
|
||||
});
|
||||
|
||||
test('computePriorityWindow includes the active cue when current position is mid-line', () => {
|
||||
const cues = makeCues(20);
|
||||
const window = computePriorityWindow(cues, 18.0, 3);
|
||||
|
||||
assert.equal(window.length, 3);
|
||||
assert.equal(window[0]!.text, 'line-3');
|
||||
assert.equal(window[1]!.text, 'line-4');
|
||||
assert.equal(window[2]!.text, 'line-5');
|
||||
});
|
||||
|
||||
function flushMicrotasks(): Promise<void> {
|
||||
return new Promise((resolve) => setTimeout(resolve, 0));
|
||||
}
|
||||
|
||||
test('prefetch service tokenizes priority window cues and caches them', async () => {
|
||||
const cues = makeCues(20);
|
||||
const cached: Map<string, SubtitleData> = new Map();
|
||||
let tokenizeCalls = 0;
|
||||
|
||||
const service = createSubtitlePrefetchService({
|
||||
cues,
|
||||
tokenizeSubtitle: async (text) => {
|
||||
tokenizeCalls += 1;
|
||||
return { text, tokens: [] };
|
||||
},
|
||||
preCacheTokenization: (text, data) => {
|
||||
cached.set(text, data);
|
||||
},
|
||||
isCacheFull: () => false,
|
||||
priorityWindowSize: 3,
|
||||
});
|
||||
|
||||
service.start(0);
|
||||
// Allow all async tokenization to complete
|
||||
for (let i = 0; i < 25; i += 1) {
|
||||
await flushMicrotasks();
|
||||
}
|
||||
service.stop();
|
||||
|
||||
// Priority window (first 3) should be cached
|
||||
assert.ok(cached.has('line-0'));
|
||||
assert.ok(cached.has('line-1'));
|
||||
assert.ok(cached.has('line-2'));
|
||||
});
|
||||
|
||||
test('prefetch service stops when cache is full', async () => {
|
||||
const cues = makeCues(20);
|
||||
let tokenizeCalls = 0;
|
||||
let cacheSize = 0;
|
||||
|
||||
const service = createSubtitlePrefetchService({
|
||||
cues,
|
||||
tokenizeSubtitle: async (text) => {
|
||||
tokenizeCalls += 1;
|
||||
return { text, tokens: [] };
|
||||
},
|
||||
preCacheTokenization: () => {
|
||||
cacheSize += 1;
|
||||
},
|
||||
isCacheFull: () => cacheSize >= 5,
|
||||
priorityWindowSize: 3,
|
||||
});
|
||||
|
||||
service.start(0);
|
||||
for (let i = 0; i < 30; i += 1) {
|
||||
await flushMicrotasks();
|
||||
}
|
||||
service.stop();
|
||||
|
||||
// Should have stopped at 5 (cache full), not tokenized all 20
|
||||
assert.ok(tokenizeCalls <= 6, `Expected <= 6 tokenize calls, got ${tokenizeCalls}`);
|
||||
});
|
||||
|
||||
test('prefetch service can be stopped mid-flight', async () => {
|
||||
const cues = makeCues(100);
|
||||
let tokenizeCalls = 0;
|
||||
|
||||
const service = createSubtitlePrefetchService({
|
||||
cues,
|
||||
tokenizeSubtitle: async (text) => {
|
||||
tokenizeCalls += 1;
|
||||
return { text, tokens: [] };
|
||||
},
|
||||
preCacheTokenization: () => {},
|
||||
isCacheFull: () => false,
|
||||
priorityWindowSize: 3,
|
||||
});
|
||||
|
||||
service.start(0);
|
||||
await flushMicrotasks();
|
||||
await flushMicrotasks();
|
||||
service.stop();
|
||||
const callsAtStop = tokenizeCalls;
|
||||
|
||||
// Wait more to confirm no further calls
|
||||
for (let i = 0; i < 10; i += 1) {
|
||||
await flushMicrotasks();
|
||||
}
|
||||
|
||||
assert.equal(tokenizeCalls, callsAtStop, 'No further tokenize calls after stop');
|
||||
assert.ok(tokenizeCalls < 100, 'Should not have tokenized all cues');
|
||||
});
|
||||
|
||||
test('prefetch service onSeek re-prioritizes from new position', async () => {
|
||||
const cues = makeCues(20);
|
||||
const cachedTexts: string[] = [];
|
||||
|
||||
const service = createSubtitlePrefetchService({
|
||||
cues,
|
||||
tokenizeSubtitle: async (text) => ({ text, tokens: [] }),
|
||||
preCacheTokenization: (text) => {
|
||||
cachedTexts.push(text);
|
||||
},
|
||||
isCacheFull: () => false,
|
||||
priorityWindowSize: 3,
|
||||
});
|
||||
|
||||
service.start(0);
|
||||
// Let a few cues process
|
||||
for (let i = 0; i < 5; i += 1) {
|
||||
await flushMicrotasks();
|
||||
}
|
||||
|
||||
// Seek to near the end
|
||||
service.onSeek(80.0);
|
||||
for (let i = 0; i < 30; i += 1) {
|
||||
await flushMicrotasks();
|
||||
}
|
||||
service.stop();
|
||||
|
||||
// After seek to 80.0, cues starting after 80.0 (line-17, line-18, line-19) should appear in cached
|
||||
const hasPostSeekCue = cachedTexts.some(
|
||||
(t) => t === 'line-17' || t === 'line-18' || t === 'line-19',
|
||||
);
|
||||
assert.ok(hasPostSeekCue, 'Should have cached cues after seek position');
|
||||
});
|
||||
|
||||
test('prefetch service still warms the priority window when cache is full', async () => {
|
||||
const cues = makeCues(20);
|
||||
const cachedTexts: string[] = [];
|
||||
|
||||
const service = createSubtitlePrefetchService({
|
||||
cues,
|
||||
tokenizeSubtitle: async (text) => ({ text, tokens: [] }),
|
||||
preCacheTokenization: (text) => {
|
||||
cachedTexts.push(text);
|
||||
},
|
||||
isCacheFull: () => true,
|
||||
priorityWindowSize: 3,
|
||||
});
|
||||
|
||||
service.start(0);
|
||||
for (let i = 0; i < 10; i += 1) {
|
||||
await flushMicrotasks();
|
||||
}
|
||||
service.stop();
|
||||
|
||||
assert.deepEqual(cachedTexts.slice(0, 3), ['line-0', 'line-1', 'line-2']);
|
||||
});
|
||||
|
||||
test('prefetch service pause/resume halts and continues tokenization', async () => {
|
||||
const cues = makeCues(20);
|
||||
let tokenizeCalls = 0;
|
||||
|
||||
const service = createSubtitlePrefetchService({
|
||||
cues,
|
||||
tokenizeSubtitle: async (text) => {
|
||||
tokenizeCalls += 1;
|
||||
return { text, tokens: [] };
|
||||
},
|
||||
preCacheTokenization: () => {},
|
||||
isCacheFull: () => false,
|
||||
priorityWindowSize: 3,
|
||||
});
|
||||
|
||||
service.start(0);
|
||||
await flushMicrotasks();
|
||||
await flushMicrotasks();
|
||||
service.pause();
|
||||
|
||||
const callsWhenPaused = tokenizeCalls;
|
||||
// Wait while paused
|
||||
for (let i = 0; i < 5; i += 1) {
|
||||
await flushMicrotasks();
|
||||
}
|
||||
// Should not have advanced much (may have 1 in-flight)
|
||||
assert.ok(tokenizeCalls <= callsWhenPaused + 1, 'Should not tokenize much while paused');
|
||||
|
||||
service.resume();
|
||||
for (let i = 0; i < 30; i += 1) {
|
||||
await flushMicrotasks();
|
||||
}
|
||||
service.stop();
|
||||
|
||||
assert.ok(tokenizeCalls > callsWhenPaused + 1, 'Should resume tokenizing after unpause');
|
||||
});
|
||||
153
src/core/services/subtitle-prefetch.ts
Normal file
153
src/core/services/subtitle-prefetch.ts
Normal file
@@ -0,0 +1,153 @@
|
||||
import type { SubtitleCue } from './subtitle-cue-parser';
|
||||
import type { SubtitleData } from '../../types';
|
||||
|
||||
/** Dependencies injected into the subtitle prefetch service. */
export interface SubtitlePrefetchServiceDeps {
  /** Full cue list for the loaded subtitle track. */
  cues: SubtitleCue[];
  /** Tokenizes one subtitle line; may resolve to null on failure. */
  tokenizeSubtitle: (text: string) => Promise<SubtitleData | null>;
  /** Stores a tokenization result in the shared cache. */
  preCacheTokenization: (text: string, data: SubtitleData) => void;
  /** Reports whether the shared tokenization cache is at capacity. */
  isCacheFull: () => boolean;
  /** Number of upcoming cues warmed first; defaults to 10. */
  priorityWindowSize?: number;
}

/** Control surface for a running prefetch pass. */
export interface SubtitlePrefetchService {
  /** Begin prefetching from the given playback position (seconds). */
  start: (currentTimeSeconds: number) => void;
  /** Cancel the current run; in-flight work is abandoned. */
  stop: () => void;
  /** Cancel and restart prefetching from a new playback position. */
  onSeek: (newTimeSeconds: number) => void;
  /** Temporarily halt tokenization without cancelling the run. */
  pause: () => void;
  /** Continue a paused run. */
  resume: () => void;
}

// Cues tokenized ahead of the playhead when the caller does not
// override priorityWindowSize.
const DEFAULT_PRIORITY_WINDOW_SIZE = 10;
|
||||
|
||||
export function computePriorityWindow(
|
||||
cues: SubtitleCue[],
|
||||
currentTimeSeconds: number,
|
||||
windowSize: number,
|
||||
): SubtitleCue[] {
|
||||
if (cues.length === 0) {
|
||||
return [];
|
||||
}
|
||||
|
||||
// Find the first cue whose end time is after the current position.
|
||||
// This includes the currently active cue when playback starts or seeks
|
||||
// mid-line, while still skipping cues that have already finished.
|
||||
let startIndex = -1;
|
||||
for (let i = 0; i < cues.length; i += 1) {
|
||||
if (cues[i]!.endTime > currentTimeSeconds) {
|
||||
startIndex = i;
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
if (startIndex < 0) {
|
||||
// All cues are before current time
|
||||
return [];
|
||||
}
|
||||
|
||||
return cues.slice(startIndex, startIndex + windowSize);
|
||||
}
|
||||
|
||||
/**
 * Create a background prefetcher that tokenizes subtitle cues ahead of
 * playback in three phases: the priority window near the playhead, then
 * all later cues, then earlier cues (rewind support). Cancellation uses a
 * monotonically increasing run id: every start/stop/seek bumps
 * `currentRunId`, and loops holding a stale id exit at their next check.
 */
export function createSubtitlePrefetchService(
  deps: SubtitlePrefetchServiceDeps,
): SubtitlePrefetchService {
  const windowSize = deps.priorityWindowSize ?? DEFAULT_PRIORITY_WINDOW_SIZE;
  // Shared mutable state read by all in-flight async loops.
  let stopped = true;
  let paused = false;
  let currentRunId = 0;

  // Tokenize cues sequentially, caching each result. Bails out when the
  // service is stopped, the run is superseded, or (unless the priority
  // window forces it via allowWhenCacheFull) the cache reports full.
  async function tokenizeCueList(
    cuesToProcess: SubtitleCue[],
    runId: number,
    options: { allowWhenCacheFull?: boolean } = {},
  ): Promise<void> {
    for (const cue of cuesToProcess) {
      if (stopped || runId !== currentRunId) {
        return;
      }

      // Wait while paused
      while (paused && !stopped && runId === currentRunId) {
        await new Promise((resolve) => setTimeout(resolve, 10));
      }

      // Re-check after the pause loop: stop()/onSeek() may have fired.
      if (stopped || runId !== currentRunId) {
        return;
      }

      if (!options.allowWhenCacheFull && deps.isCacheFull()) {
        return;
      }

      try {
        const result = await deps.tokenizeSubtitle(cue.text);
        // Only cache if this run is still current — a seek during the await
        // must not pollute the cache ordering of the new run.
        if (result && !stopped && runId === currentRunId) {
          deps.preCacheTokenization(cue.text, result);
        }
      } catch {
        // Skip failed cues, continue prefetching
      }

      // Yield to allow live processing to take priority
      await new Promise((resolve) => setTimeout(resolve, 0));
    }
  }

  // One full prefetch pass from a given playback position.
  async function startPrefetching(currentTimeSeconds: number, runId: number): Promise<void> {
    const cues = deps.cues;

    // Phase 1: Priority window
    const priorityCues = computePriorityWindow(cues, currentTimeSeconds, windowSize);
    await tokenizeCueList(priorityCues, runId, { allowWhenCacheFull: true });

    if (stopped || runId !== currentRunId) {
      return;
    }

    // Phase 2: Background - remaining cues forward from current position
    // (priority cues are excluded by text to avoid double tokenization).
    const priorityTexts = new Set(priorityCues.map((c) => c.text));
    const remainingCues = cues.filter(
      (cue) => cue.startTime > currentTimeSeconds && !priorityTexts.has(cue.text),
    );
    await tokenizeCueList(remainingCues, runId);

    if (stopped || runId !== currentRunId) {
      return;
    }

    // Phase 3: Background - earlier cues (for rewind support)
    const earlierCues = cues.filter(
      (cue) => cue.startTime <= currentTimeSeconds && !priorityTexts.has(cue.text),
    );
    await tokenizeCueList(earlierCues, runId);
  }

  return {
    start(currentTimeSeconds: number) {
      stopped = false;
      paused = false;
      currentRunId += 1;
      const runId = currentRunId;
      // Fire-and-forget: cancellation happens via runId/stopped checks.
      void startPrefetching(currentTimeSeconds, runId);
    },

    stop() {
      stopped = true;
      // Bump the run id so any loop that misses `stopped` still exits.
      currentRunId += 1;
    },

    onSeek(newTimeSeconds: number) {
      // Cancel current run and restart from new position
      // NOTE(review): `stopped` is left untouched here, so onSeek() after
      // stop() starts a run whose loops exit immediately — confirm intended.
      currentRunId += 1;
      const runId = currentRunId;
      void startPrefetching(newTimeSeconds, runId);
    },

    pause() {
      paused = true;
    },

    resume() {
      paused = false;
    },
  };
}
|
||||
@@ -170,3 +170,87 @@ test('subtitle processing cache invalidation only affects future subtitle events
|
||||
|
||||
assert.equal(callsByText.get('same'), 2);
|
||||
});
|
||||
|
||||
test('preCacheTokenization stores entry that is returned on next subtitle change', async () => {
|
||||
const emitted: SubtitleData[] = [];
|
||||
let tokenizeCalls = 0;
|
||||
const controller = createSubtitleProcessingController({
|
||||
tokenizeSubtitle: async (text) => {
|
||||
tokenizeCalls += 1;
|
||||
return { text, tokens: [] };
|
||||
},
|
||||
emitSubtitle: (payload) => emitted.push(payload),
|
||||
});
|
||||
|
||||
controller.preCacheTokenization('予め', { text: '予め', tokens: [] });
|
||||
controller.onSubtitleChange('予め');
|
||||
await flushMicrotasks();
|
||||
|
||||
assert.equal(tokenizeCalls, 0, 'should not call tokenize when pre-cached');
|
||||
assert.deepEqual(emitted, [{ text: '予め', tokens: [] }]);
|
||||
});
|
||||
|
||||
test('preCacheTokenization reuses normalized subtitle text across ASS linebreak variants', async () => {
|
||||
const emitted: SubtitleData[] = [];
|
||||
let tokenizeCalls = 0;
|
||||
const controller = createSubtitleProcessingController({
|
||||
tokenizeSubtitle: async (text) => {
|
||||
tokenizeCalls += 1;
|
||||
return { text, tokens: [] };
|
||||
},
|
||||
emitSubtitle: (payload) => emitted.push(payload),
|
||||
});
|
||||
|
||||
controller.preCacheTokenization('一行目\\N二行目', { text: '一行目\n二行目', tokens: [] });
|
||||
controller.onSubtitleChange('一行目\n二行目');
|
||||
await flushMicrotasks();
|
||||
|
||||
assert.equal(tokenizeCalls, 0, 'should not call tokenize when normalized text matches');
|
||||
assert.deepEqual(emitted, [{ text: '一行目\n二行目', tokens: [] }]);
|
||||
});
|
||||
|
||||
test('consumeCachedSubtitle returns prefetched payload and prevents reprocessing same line', async () => {
|
||||
const emitted: SubtitleData[] = [];
|
||||
let tokenizeCalls = 0;
|
||||
const controller = createSubtitleProcessingController({
|
||||
tokenizeSubtitle: async (text) => {
|
||||
tokenizeCalls += 1;
|
||||
return { text, tokens: [] };
|
||||
},
|
||||
emitSubtitle: (payload) => emitted.push(payload),
|
||||
});
|
||||
|
||||
controller.preCacheTokenization('猫\\Nです', { text: '猫\nです', tokens: [] });
|
||||
|
||||
const immediate = controller.consumeCachedSubtitle('猫\nです');
|
||||
assert.deepEqual(immediate, { text: '猫\nです', tokens: [] });
|
||||
|
||||
controller.onSubtitleChange('猫\nです');
|
||||
await flushMicrotasks();
|
||||
|
||||
assert.equal(tokenizeCalls, 0, 'same cached subtitle should not reprocess after immediate consume');
|
||||
assert.deepEqual(emitted, []);
|
||||
});
|
||||
|
||||
test('isCacheFull returns false when cache is below limit', () => {
|
||||
const controller = createSubtitleProcessingController({
|
||||
tokenizeSubtitle: async (text) => ({ text, tokens: null }),
|
||||
emitSubtitle: () => {},
|
||||
});
|
||||
|
||||
assert.equal(controller.isCacheFull(), false);
|
||||
});
|
||||
|
||||
test('isCacheFull returns true when cache reaches limit', async () => {
|
||||
const controller = createSubtitleProcessingController({
|
||||
tokenizeSubtitle: async (text) => ({ text, tokens: [] }),
|
||||
emitSubtitle: () => {},
|
||||
});
|
||||
|
||||
// Fill cache to the 256 limit
|
||||
for (let i = 0; i < 256; i += 1) {
|
||||
controller.preCacheTokenization(`line-${i}`, { text: `line-${i}`, tokens: [] });
|
||||
}
|
||||
|
||||
assert.equal(controller.isCacheFull(), true);
|
||||
});
|
||||
|
||||
@@ -11,6 +11,13 @@ export interface SubtitleProcessingController {
|
||||
onSubtitleChange: (text: string) => void;
|
||||
refreshCurrentSubtitle: (textOverride?: string) => void;
|
||||
invalidateTokenizationCache: () => void;
|
||||
preCacheTokenization: (text: string, data: SubtitleData) => void;
|
||||
consumeCachedSubtitle: (text: string) => SubtitleData | null;
|
||||
isCacheFull: () => boolean;
|
||||
}
|
||||
|
||||
function normalizeSubtitleCacheKey(text: string): string {
|
||||
return text.replace(/\r\n/g, '\n').replace(/\\N/g, '\n').replace(/\\n/g, '\n').trim();
|
||||
}
|
||||
|
||||
export function createSubtitleProcessingController(
|
||||
@@ -26,18 +33,19 @@ export function createSubtitleProcessingController(
|
||||
const now = deps.now ?? (() => Date.now());
|
||||
|
||||
const getCachedTokenization = (text: string): SubtitleData | null => {
|
||||
const cached = tokenizationCache.get(text);
|
||||
const cacheKey = normalizeSubtitleCacheKey(text);
|
||||
const cached = tokenizationCache.get(cacheKey);
|
||||
if (!cached) {
|
||||
return null;
|
||||
}
|
||||
|
||||
tokenizationCache.delete(text);
|
||||
tokenizationCache.set(text, cached);
|
||||
tokenizationCache.delete(cacheKey);
|
||||
tokenizationCache.set(cacheKey, cached);
|
||||
return cached;
|
||||
};
|
||||
|
||||
const setCachedTokenization = (text: string, payload: SubtitleData): void => {
|
||||
tokenizationCache.set(text, payload);
|
||||
tokenizationCache.set(normalizeSubtitleCacheKey(text), payload);
|
||||
while (tokenizationCache.size > SUBTITLE_TOKENIZATION_CACHE_LIMIT) {
|
||||
const firstKey = tokenizationCache.keys().next().value;
|
||||
if (firstKey !== undefined) {
|
||||
@@ -130,5 +138,22 @@ export function createSubtitleProcessingController(
|
||||
invalidateTokenizationCache: () => {
|
||||
tokenizationCache.clear();
|
||||
},
|
||||
preCacheTokenization: (text: string, data: SubtitleData) => {
|
||||
setCachedTokenization(text, data);
|
||||
},
|
||||
consumeCachedSubtitle: (text: string) => {
|
||||
const cached = getCachedTokenization(text);
|
||||
if (!cached) {
|
||||
return null;
|
||||
}
|
||||
|
||||
latestText = text;
|
||||
lastEmittedText = text;
|
||||
refreshRequested = false;
|
||||
return cached;
|
||||
},
|
||||
isCacheFull: () => {
|
||||
return tokenizationCache.size >= SUBTITLE_TOKENIZATION_CACHE_LIMIT;
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
@@ -108,8 +108,9 @@ test('serializeSubtitleMarkup preserves tooltip attrs and name-match precedence'
|
||||
partOfSpeech: PartOfSpeech.other,
|
||||
isMerged: false,
|
||||
isKnown: false,
|
||||
isNPlusOneTarget: false,
|
||||
isNPlusOneTarget: true,
|
||||
isNameMatch: true,
|
||||
jlptLevel: 'N5',
|
||||
frequencyRank: 12,
|
||||
},
|
||||
],
|
||||
@@ -122,9 +123,35 @@ test('serializeSubtitleMarkup preserves tooltip attrs and name-match precedence'
|
||||
);
|
||||
assert.match(
|
||||
markup,
|
||||
/<span class="word word-name-match" data-reading="あれくしあ" data-headword="アレクシア" data-frequency-rank="12">アレクシア<\/span>/,
|
||||
/<span class="word word-name-match" data-reading="あれくしあ" data-headword="アレクシア">アレクシア<\/span>/,
|
||||
);
|
||||
assert.doesNotMatch(markup, /word-name-match word-known|word-known word-name-match/);
|
||||
assert.doesNotMatch(markup, /word-name-match word-n-plus-one|word-n-plus-one word-name-match/);
|
||||
assert.doesNotMatch(markup, /data-frequency-rank="12"|data-jlpt-level="N5"|word-jlpt-n5/);
|
||||
});
|
||||
|
||||
test('serializeSubtitleMarkup keeps filtered tokens hoverable without annotation attrs', () => {
|
||||
const payload: SubtitleData = {
|
||||
text: 'は',
|
||||
tokens: [
|
||||
{
|
||||
surface: 'は',
|
||||
reading: 'は',
|
||||
headword: 'は',
|
||||
startPos: 0,
|
||||
endPos: 1,
|
||||
partOfSpeech: PartOfSpeech.particle,
|
||||
pos1: '助詞',
|
||||
isMerged: false,
|
||||
isKnown: false,
|
||||
isNPlusOneTarget: false,
|
||||
isNameMatch: false,
|
||||
},
|
||||
],
|
||||
};
|
||||
|
||||
const markup = serializeSubtitleMarkup(payload, frequencyOptions);
|
||||
assert.equal(markup, '<span class="word" data-reading="は" data-headword="は">は</span>');
|
||||
});
|
||||
|
||||
test('serializeSubtitleWebsocketMessage emits sentence payload', () => {
|
||||
|
||||
@@ -47,10 +47,15 @@ function escapeHtml(text: string): string {
|
||||
.replaceAll("'", ''');
|
||||
}
|
||||
|
||||
function hasPrioritizedNameMatch(token: MergedToken): boolean {
|
||||
return token.isNameMatch === true;
|
||||
}
|
||||
|
||||
function computeFrequencyClass(
|
||||
token: MergedToken,
|
||||
options: SubtitleWebsocketFrequencyOptions,
|
||||
): string | null {
|
||||
if (hasPrioritizedNameMatch(token)) return null;
|
||||
if (!options.enabled) return null;
|
||||
if (typeof token.frequencyRank !== 'number' || !Number.isFinite(token.frequencyRank)) return null;
|
||||
|
||||
@@ -70,6 +75,7 @@ function getFrequencyRankLabel(
|
||||
token: MergedToken,
|
||||
options: SubtitleWebsocketFrequencyOptions,
|
||||
): string | null {
|
||||
if (hasPrioritizedNameMatch(token)) return null;
|
||||
if (!options.enabled) return null;
|
||||
if (typeof token.frequencyRank !== 'number' || !Number.isFinite(token.frequencyRank)) return null;
|
||||
|
||||
@@ -79,21 +85,25 @@ function getFrequencyRankLabel(
|
||||
}
|
||||
|
||||
function getJlptLevelLabel(token: MergedToken): string | null {
|
||||
if (hasPrioritizedNameMatch(token)) {
|
||||
return null;
|
||||
}
|
||||
|
||||
return token.jlptLevel ?? null;
|
||||
}
|
||||
|
||||
function computeWordClass(token: MergedToken, options: SubtitleWebsocketFrequencyOptions): string {
|
||||
const classes = ['word'];
|
||||
|
||||
if (token.isNPlusOneTarget) {
|
||||
classes.push('word-n-plus-one');
|
||||
} else if (token.isNameMatch) {
|
||||
if (hasPrioritizedNameMatch(token)) {
|
||||
classes.push('word-name-match');
|
||||
} else if (token.isNPlusOneTarget) {
|
||||
classes.push('word-n-plus-one');
|
||||
} else if (token.isKnown) {
|
||||
classes.push('word-known');
|
||||
}
|
||||
|
||||
if (token.jlptLevel) {
|
||||
if (!hasPrioritizedNameMatch(token) && token.jlptLevel) {
|
||||
classes.push(`word-jlpt-${token.jlptLevel.toLowerCase()}`);
|
||||
}
|
||||
|
||||
@@ -137,6 +147,8 @@ function serializeSubtitleToken(
|
||||
token: MergedToken,
|
||||
options: SubtitleWebsocketFrequencyOptions,
|
||||
): SerializedSubtitleToken {
|
||||
const prioritizedNameMatch = hasPrioritizedNameMatch(token);
|
||||
|
||||
return {
|
||||
surface: token.surface,
|
||||
reading: token.reading,
|
||||
@@ -146,10 +158,10 @@ function serializeSubtitleToken(
|
||||
partOfSpeech: token.partOfSpeech,
|
||||
isMerged: token.isMerged,
|
||||
isKnown: token.isKnown,
|
||||
isNPlusOneTarget: token.isNPlusOneTarget,
|
||||
isNPlusOneTarget: prioritizedNameMatch ? false : token.isNPlusOneTarget,
|
||||
isNameMatch: token.isNameMatch ?? false,
|
||||
jlptLevel: token.jlptLevel,
|
||||
frequencyRank: token.frequencyRank,
|
||||
jlptLevel: prioritizedNameMatch ? undefined : token.jlptLevel,
|
||||
frequencyRank: prioritizedNameMatch ? undefined : token.frequencyRank,
|
||||
className: computeWordClass(token, options),
|
||||
frequencyRankLabel: getFrequencyRankLabel(token, options),
|
||||
jlptLevelLabel: getJlptLevelLabel(token),
|
||||
|
||||
@@ -1,23 +1,72 @@
|
||||
import assert from 'node:assert/strict';
|
||||
import test from 'node:test';
|
||||
import { injectTexthookerBootstrapHtml } from './texthooker';
|
||||
import { injectTexthookerBootstrapHtml, type TexthookerBootstrapSettings } from './texthooker';
|
||||
|
||||
test('injectTexthookerBootstrapHtml injects websocket bootstrap before head close', () => {
|
||||
const html = '<html><head><title>Texthooker</title></head><body></body></html>';
|
||||
|
||||
const actual = injectTexthookerBootstrapHtml(html, 'ws://127.0.0.1:6678');
|
||||
const settings: TexthookerBootstrapSettings = {
|
||||
enableKnownWordColoring: true,
|
||||
enableNPlusOneColoring: true,
|
||||
enableNameMatchColoring: true,
|
||||
enableFrequencyColoring: true,
|
||||
enableJlptColoring: true,
|
||||
characterDictionaryEnabled: true,
|
||||
knownWordColor: '#a6da95',
|
||||
nPlusOneColor: '#c6a0f6',
|
||||
nameMatchColor: '#f5bde6',
|
||||
hoverTokenColor: '#f4dbd6',
|
||||
hoverTokenBackgroundColor: 'rgba(54, 58, 79, 0.84)',
|
||||
jlptColors: {
|
||||
N1: '#ed8796',
|
||||
N2: '#f5a97f',
|
||||
N3: '#f9e2af',
|
||||
N4: '#a6e3a1',
|
||||
N5: '#8aadf4',
|
||||
},
|
||||
frequencyDictionary: {
|
||||
singleColor: '#f5a97f',
|
||||
bandedColors: ['#ed8796', '#f5a97f', '#f9e2af', '#8bd5ca', '#8aadf4'],
|
||||
},
|
||||
};
|
||||
const actual = injectTexthookerBootstrapHtml(html, 'ws://127.0.0.1:6678', settings);
|
||||
|
||||
assert.match(
|
||||
actual,
|
||||
/window\.localStorage\.setItem\('bannou-texthooker-websocketUrl', "ws:\/\/127\.0\.0\.1:6678"\)/,
|
||||
);
|
||||
assert.match(
|
||||
actual,
|
||||
/window\.localStorage\.setItem\('bannou-texthooker-enableKnownWordColoring', "1"\)/,
|
||||
);
|
||||
assert.match(
|
||||
actual,
|
||||
/window\.localStorage\.setItem\('bannou-texthooker-enableNPlusOneColoring', "1"\)/,
|
||||
);
|
||||
assert.match(
|
||||
actual,
|
||||
/window\.localStorage\.setItem\('bannou-texthooker-enableNameMatchColoring', "1"\)/,
|
||||
);
|
||||
assert.match(
|
||||
actual,
|
||||
/window\.localStorage\.setItem\('bannou-texthooker-enableFrequencyColoring', "1"\)/,
|
||||
);
|
||||
assert.match(
|
||||
actual,
|
||||
/window\.localStorage\.setItem\('bannou-texthooker-enableJlptColoring', "1"\)/,
|
||||
);
|
||||
assert.match(
|
||||
actual,
|
||||
/window\.localStorage\.setItem\('bannou-texthooker-characterDictionaryEnabled', "1"\)/,
|
||||
);
|
||||
assert.match(actual, /--subminer-known-word-color:\s*#a6da95;/);
|
||||
assert.match(actual, /--subminer-n-plus-one-color:\s*#c6a0f6;/);
|
||||
assert.match(actual, /--subminer-name-match-color:\s*#f5bde6;/);
|
||||
assert.match(actual, /--subminer-jlpt-n1-color:\s*#ed8796;/);
|
||||
assert.match(actual, /--subminer-frequency-band-4-color:\s*#8bd5ca;/);
|
||||
assert.match(actual, /--sm-token-hover-bg:\s*rgba\(54, 58, 79, 0\.84\);/);
|
||||
assert.doesNotMatch(actual, /p \.word\.word-known\s*\{/);
|
||||
assert.ok(actual.indexOf('</script></head>') !== -1);
|
||||
assert.ok(actual.includes('bannou-texthooker-websocketUrl'));
|
||||
assert.ok(!actual.includes('bannou-texthooker-enableKnownWordColoring'));
|
||||
assert.ok(!actual.includes('bannou-texthooker-enableNPlusOneColoring'));
|
||||
assert.ok(!actual.includes('bannou-texthooker-enableNameMatchColoring'));
|
||||
assert.ok(!actual.includes('bannou-texthooker-enableFrequencyColoring'));
|
||||
assert.ok(!actual.includes('bannou-texthooker-enableJlptColoring'));
|
||||
});
|
||||
|
||||
test('injectTexthookerBootstrapHtml leaves html unchanged without websocketUrl', () => {
|
||||
|
||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user