mirror of
https://github.com/ksyasuda/SubMiner.git
synced 2026-03-20 12:11:28 -07:00
Enhance AniList character dictionary sync and subtitle features (#15)
This commit is contained in:
@@ -1,6 +1,5 @@
|
||||
import test from 'node:test';
|
||||
import assert from 'node:assert/strict';
|
||||
import * as childProcess from 'child_process';
|
||||
|
||||
import { guessAnilistMediaInfo, updateAnilistPostWatchProgress } from './anilist-updater';
|
||||
|
||||
@@ -12,67 +11,27 @@ function createJsonResponse(payload: unknown): Response {
|
||||
}
|
||||
|
||||
test('guessAnilistMediaInfo uses guessit output when available', async () => {
|
||||
const originalExecFile = childProcess.execFile;
|
||||
(
|
||||
childProcess as unknown as {
|
||||
execFile: typeof childProcess.execFile;
|
||||
}
|
||||
).execFile = ((...args: unknown[]) => {
|
||||
const callback = args[args.length - 1];
|
||||
const cb =
|
||||
typeof callback === 'function'
|
||||
? (callback as (error: Error | null, stdout: string, stderr: string) => void)
|
||||
: null;
|
||||
cb?.(null, JSON.stringify({ title: 'Guessit Title', episode: 7 }), '');
|
||||
return {} as childProcess.ChildProcess;
|
||||
}) as typeof childProcess.execFile;
|
||||
|
||||
try {
|
||||
const result = await guessAnilistMediaInfo('/tmp/demo.mkv', null);
|
||||
assert.deepEqual(result, {
|
||||
title: 'Guessit Title',
|
||||
episode: 7,
|
||||
source: 'guessit',
|
||||
});
|
||||
} finally {
|
||||
(
|
||||
childProcess as unknown as {
|
||||
execFile: typeof childProcess.execFile;
|
||||
}
|
||||
).execFile = originalExecFile;
|
||||
}
|
||||
const result = await guessAnilistMediaInfo('/tmp/demo.mkv', null, {
|
||||
runGuessit: async () => JSON.stringify({ title: 'Guessit Title', episode: 7 }),
|
||||
});
|
||||
assert.deepEqual(result, {
|
||||
title: 'Guessit Title',
|
||||
episode: 7,
|
||||
source: 'guessit',
|
||||
});
|
||||
});
|
||||
|
||||
test('guessAnilistMediaInfo falls back to parser when guessit fails', async () => {
|
||||
const originalExecFile = childProcess.execFile;
|
||||
(
|
||||
childProcess as unknown as {
|
||||
execFile: typeof childProcess.execFile;
|
||||
}
|
||||
).execFile = ((...args: unknown[]) => {
|
||||
const callback = args[args.length - 1];
|
||||
const cb =
|
||||
typeof callback === 'function'
|
||||
? (callback as (error: Error | null, stdout: string, stderr: string) => void)
|
||||
: null;
|
||||
cb?.(new Error('guessit not found'), '', '');
|
||||
return {} as childProcess.ChildProcess;
|
||||
}) as typeof childProcess.execFile;
|
||||
|
||||
try {
|
||||
const result = await guessAnilistMediaInfo('/tmp/My Anime S01E03.mkv', null);
|
||||
assert.deepEqual(result, {
|
||||
title: 'My Anime',
|
||||
episode: 3,
|
||||
source: 'fallback',
|
||||
});
|
||||
} finally {
|
||||
(
|
||||
childProcess as unknown as {
|
||||
execFile: typeof childProcess.execFile;
|
||||
}
|
||||
).execFile = originalExecFile;
|
||||
}
|
||||
const result = await guessAnilistMediaInfo('/tmp/My Anime S01E03.mkv', null, {
|
||||
runGuessit: async () => {
|
||||
throw new Error('guessit not found');
|
||||
},
|
||||
});
|
||||
assert.deepEqual(result, {
|
||||
title: 'My Anime',
|
||||
episode: 3,
|
||||
source: 'fallback',
|
||||
});
|
||||
});
|
||||
|
||||
test('updateAnilistPostWatchProgress updates progress when behind', async () => {
|
||||
|
||||
@@ -72,6 +72,10 @@ function runGuessit(target: string): Promise<string> {
|
||||
});
|
||||
}
|
||||
|
||||
type GuessAnilistMediaInfoDeps = {
|
||||
runGuessit: (target: string) => Promise<string>;
|
||||
};
|
||||
|
||||
function firstString(value: unknown): string | null {
|
||||
if (typeof value === 'string') {
|
||||
const trimmed = value.trim();
|
||||
@@ -177,12 +181,13 @@ function pickBestSearchResult(
|
||||
export async function guessAnilistMediaInfo(
|
||||
mediaPath: string | null,
|
||||
mediaTitle: string | null,
|
||||
deps: GuessAnilistMediaInfoDeps = { runGuessit },
|
||||
): Promise<AnilistMediaGuess | null> {
|
||||
const target = mediaPath ?? mediaTitle;
|
||||
|
||||
if (target && target.trim().length > 0) {
|
||||
try {
|
||||
const stdout = await runGuessit(target);
|
||||
const stdout = await deps.runGuessit(target);
|
||||
const parsed = JSON.parse(stdout) as Record<string, unknown>;
|
||||
const title = firstString(parsed.title);
|
||||
const episode = firstPositiveInteger(parsed.episode);
|
||||
|
||||
@@ -11,6 +11,7 @@ function makeArgs(overrides: Partial<CliArgs> = {}): CliArgs {
|
||||
toggle: false,
|
||||
toggleVisibleOverlay: false,
|
||||
settings: false,
|
||||
setup: false,
|
||||
show: false,
|
||||
hide: false,
|
||||
showVisibleOverlay: false,
|
||||
@@ -30,6 +31,7 @@ function makeArgs(overrides: Partial<CliArgs> = {}): CliArgs {
|
||||
anilistLogout: false,
|
||||
anilistSetup: false,
|
||||
anilistRetryQueue: false,
|
||||
dictionary: false,
|
||||
jellyfin: false,
|
||||
jellyfinLogin: false,
|
||||
jellyfinLogout: false,
|
||||
|
||||
@@ -4,7 +4,8 @@ import { AppReadyRuntimeDeps, runAppReadyRuntime } from './startup';
|
||||
|
||||
function makeDeps(overrides: Partial<AppReadyRuntimeDeps> = {}) {
|
||||
const calls: string[] = [];
|
||||
const deps: AppReadyRuntimeDeps = {
|
||||
const deps = {
|
||||
ensureDefaultConfigBootstrap: () => calls.push('ensureDefaultConfigBootstrap'),
|
||||
loadSubtitlePosition: () => calls.push('loadSubtitlePosition'),
|
||||
resolveKeybindings: () => calls.push('resolveKeybindings'),
|
||||
createMpvClient: () => calls.push('createMpvClient'),
|
||||
@@ -20,8 +21,13 @@ function makeDeps(overrides: Partial<AppReadyRuntimeDeps> = {}) {
|
||||
setSecondarySubMode: (mode) => calls.push(`setSecondarySubMode:${mode}`),
|
||||
defaultSecondarySubMode: 'hover',
|
||||
defaultWebsocketPort: 9001,
|
||||
defaultAnnotationWebsocketPort: 6678,
|
||||
defaultTexthookerPort: 5174,
|
||||
hasMpvWebsocketPlugin: () => true,
|
||||
startSubtitleWebsocket: (port) => calls.push(`startSubtitleWebsocket:${port}`),
|
||||
startAnnotationWebsocket: (port) => calls.push(`startAnnotationWebsocket:${port}`),
|
||||
startTexthooker: (port, websocketUrl) =>
|
||||
calls.push(`startTexthooker:${port}:${websocketUrl ?? ''}`),
|
||||
log: (message) => calls.push(`log:${message}`),
|
||||
createMecabTokenizerAndCheck: async () => {
|
||||
calls.push('createMecabTokenizerAndCheck');
|
||||
@@ -34,6 +40,9 @@ function makeDeps(overrides: Partial<AppReadyRuntimeDeps> = {}) {
|
||||
loadYomitanExtension: async () => {
|
||||
calls.push('loadYomitanExtension');
|
||||
},
|
||||
handleFirstRunSetup: async () => {
|
||||
calls.push('handleFirstRunSetup');
|
||||
},
|
||||
prewarmSubtitleDictionaries: async () => {
|
||||
calls.push('prewarmSubtitleDictionaries');
|
||||
},
|
||||
@@ -42,12 +51,13 @@ function makeDeps(overrides: Partial<AppReadyRuntimeDeps> = {}) {
|
||||
},
|
||||
texthookerOnlyMode: false,
|
||||
shouldAutoInitializeOverlayRuntimeFromConfig: () => true,
|
||||
setVisibleOverlayVisible: (visible) => calls.push(`setVisibleOverlayVisible:${visible}`),
|
||||
initializeOverlayRuntime: () => calls.push('initializeOverlayRuntime'),
|
||||
handleInitialArgs: () => calls.push('handleInitialArgs'),
|
||||
logDebug: (message) => calls.push(`debug:${message}`),
|
||||
now: () => 1000,
|
||||
...overrides,
|
||||
};
|
||||
} as AppReadyRuntimeDeps;
|
||||
return { deps, calls };
|
||||
}
|
||||
|
||||
@@ -56,8 +66,14 @@ test('runAppReadyRuntime starts websocket in auto mode when plugin missing', asy
|
||||
hasMpvWebsocketPlugin: () => false,
|
||||
});
|
||||
await runAppReadyRuntime(deps);
|
||||
assert.ok(calls.includes('ensureDefaultConfigBootstrap'));
|
||||
assert.ok(calls.includes('startSubtitleWebsocket:9001'));
|
||||
assert.ok(calls.includes('startAnnotationWebsocket:6678'));
|
||||
assert.ok(calls.includes('setVisibleOverlayVisible:true'));
|
||||
assert.ok(calls.includes('initializeOverlayRuntime'));
|
||||
assert.ok(
|
||||
calls.indexOf('setVisibleOverlayVisible:true') < calls.indexOf('initializeOverlayRuntime'),
|
||||
);
|
||||
assert.ok(calls.includes('startBackgroundWarmups'));
|
||||
assert.ok(
|
||||
calls.includes(
|
||||
@@ -66,6 +82,46 @@ test('runAppReadyRuntime starts websocket in auto mode when plugin missing', asy
|
||||
);
|
||||
});
|
||||
|
||||
test('runAppReadyRuntime starts texthooker on startup when enabled in config', async () => {
|
||||
const { deps, calls } = makeDeps({
|
||||
getResolvedConfig: () => ({
|
||||
websocket: { enabled: 'auto' },
|
||||
secondarySub: {},
|
||||
texthooker: { launchAtStartup: true },
|
||||
}),
|
||||
});
|
||||
|
||||
await runAppReadyRuntime(deps);
|
||||
|
||||
assert.ok(calls.includes('startTexthooker:5174:ws://127.0.0.1:6678'));
|
||||
assert.ok(calls.indexOf('handleFirstRunSetup') < calls.indexOf('handleInitialArgs'));
|
||||
assert.ok(
|
||||
calls.indexOf('createMpvClient') < calls.indexOf('startTexthooker:5174:ws://127.0.0.1:6678'),
|
||||
);
|
||||
assert.ok(
|
||||
calls.indexOf('startTexthooker:5174:ws://127.0.0.1:6678') < calls.indexOf('handleInitialArgs'),
|
||||
);
|
||||
});
|
||||
|
||||
test('runAppReadyRuntime keeps annotation websocket enabled when regular websocket auto-skips', async () => {
|
||||
const { deps, calls } = makeDeps({
|
||||
getResolvedConfig: () => ({
|
||||
websocket: { enabled: 'auto' },
|
||||
annotationWebsocket: { enabled: true, port: 6678 },
|
||||
secondarySub: {},
|
||||
texthooker: { launchAtStartup: true },
|
||||
}),
|
||||
hasMpvWebsocketPlugin: () => true,
|
||||
});
|
||||
|
||||
await runAppReadyRuntime(deps);
|
||||
|
||||
assert.equal(calls.includes('startSubtitleWebsocket:9001'), false);
|
||||
assert.ok(calls.includes('startAnnotationWebsocket:6678'));
|
||||
assert.ok(calls.includes('startTexthooker:5174:ws://127.0.0.1:6678'));
|
||||
assert.ok(calls.includes('log:mpv_websocket detected, skipping built-in WebSocket server'));
|
||||
});
|
||||
|
||||
test('runAppReadyRuntime skips heavy startup when shouldSkipHeavyStartup returns true', async () => {
|
||||
const { deps, calls } = makeDeps({
|
||||
shouldSkipHeavyStartup: () => true,
|
||||
@@ -97,6 +153,7 @@ test('runAppReadyRuntime skips heavy startup when shouldSkipHeavyStartup returns
|
||||
|
||||
await runAppReadyRuntime(deps);
|
||||
|
||||
assert.equal(calls.includes('ensureDefaultConfigBootstrap'), true);
|
||||
assert.equal(calls.includes('reloadConfig'), false);
|
||||
assert.equal(calls.includes('getResolvedConfig'), false);
|
||||
assert.equal(calls.includes('getConfigWarnings'), false);
|
||||
@@ -111,7 +168,10 @@ test('runAppReadyRuntime skips heavy startup when shouldSkipHeavyStartup returns
|
||||
assert.equal(calls.includes('logConfigWarning'), false);
|
||||
assert.equal(calls.includes('handleInitialArgs'), true);
|
||||
assert.equal(calls.includes('loadYomitanExtension'), true);
|
||||
assert.equal(calls.includes('handleFirstRunSetup'), true);
|
||||
assert.ok(calls.indexOf('loadYomitanExtension') < calls.indexOf('handleInitialArgs'));
|
||||
assert.ok(calls.indexOf('loadYomitanExtension') < calls.indexOf('handleFirstRunSetup'));
|
||||
assert.ok(calls.indexOf('handleFirstRunSetup') < calls.indexOf('handleInitialArgs'));
|
||||
});
|
||||
|
||||
test('runAppReadyRuntime skips Jellyfin remote startup when dependency is not wired', async () => {
|
||||
|
||||
@@ -11,6 +11,7 @@ function makeArgs(overrides: Partial<CliArgs> = {}): CliArgs {
|
||||
toggle: false,
|
||||
toggleVisibleOverlay: false,
|
||||
settings: false,
|
||||
setup: false,
|
||||
show: false,
|
||||
hide: false,
|
||||
showVisibleOverlay: false,
|
||||
@@ -30,6 +31,7 @@ function makeArgs(overrides: Partial<CliArgs> = {}): CliArgs {
|
||||
anilistLogout: false,
|
||||
anilistSetup: false,
|
||||
anilistRetryQueue: false,
|
||||
dictionary: false,
|
||||
jellyfin: false,
|
||||
jellyfinLogin: false,
|
||||
jellyfinLogout: false,
|
||||
@@ -95,6 +97,9 @@ function createDeps(overrides: Partial<CliCommandServiceDeps> = {}) {
|
||||
openYomitanSettingsDelayed: (delayMs) => {
|
||||
calls.push(`openYomitanSettingsDelayed:${delayMs}`);
|
||||
},
|
||||
openFirstRunSetup: () => {
|
||||
calls.push('openFirstRunSetup');
|
||||
},
|
||||
setVisibleOverlayVisible: (visible) => {
|
||||
calls.push(`setVisibleOverlayVisible:${visible}`);
|
||||
},
|
||||
@@ -163,6 +168,13 @@ function createDeps(overrides: Partial<CliCommandServiceDeps> = {}) {
|
||||
calls.push('retryAnilistQueue');
|
||||
return { ok: true, message: 'AniList retry processed.' };
|
||||
},
|
||||
generateCharacterDictionary: async () => ({
|
||||
zipPath: '/tmp/anilist-1.zip',
|
||||
fromCache: false,
|
||||
mediaId: 1,
|
||||
mediaTitle: 'Test',
|
||||
entryCount: 10,
|
||||
}),
|
||||
runJellyfinCommand: async () => {
|
||||
calls.push('runJellyfinCommand');
|
||||
},
|
||||
@@ -221,6 +233,16 @@ test('handleCliCommand processes --start for second-instance when overlay runtim
|
||||
);
|
||||
});
|
||||
|
||||
test('handleCliCommand opens first-run setup window for --setup', () => {
|
||||
const { deps, calls } = createDeps();
|
||||
|
||||
handleCliCommand(makeArgs({ setup: true }), 'initial', deps);
|
||||
|
||||
assert.ok(calls.includes('openFirstRunSetup'));
|
||||
assert.ok(calls.includes('log:Opened first-run setup flow.'));
|
||||
assert.equal(calls.includes('openYomitanSettingsDelayed:1000'), false);
|
||||
});
|
||||
|
||||
test('handleCliCommand applies cli log level for second-instance commands', () => {
|
||||
const { deps, calls } = createDeps({
|
||||
setLogLevel: (level) => {
|
||||
@@ -396,6 +418,52 @@ test('handleCliCommand runs AniList retry command', async () => {
|
||||
assert.ok(calls.includes('log:AniList retry processed.'));
|
||||
});
|
||||
|
||||
test('handleCliCommand runs dictionary generation command', async () => {
|
||||
const { deps, calls } = createDeps({
|
||||
hasMainWindow: () => false,
|
||||
generateCharacterDictionary: async () => ({
|
||||
zipPath: '/tmp/anilist-9253.zip',
|
||||
fromCache: true,
|
||||
mediaId: 9253,
|
||||
mediaTitle: 'STEINS;GATE',
|
||||
entryCount: 314,
|
||||
}),
|
||||
});
|
||||
handleCliCommand(makeArgs({ dictionary: true }), 'initial', deps);
|
||||
await new Promise((resolve) => setImmediate(resolve));
|
||||
assert.ok(calls.includes('log:Generating character dictionary for current anime...'));
|
||||
assert.ok(
|
||||
calls.includes('log:Character dictionary cache hit: AniList 9253 (STEINS;GATE), entries=314'),
|
||||
);
|
||||
assert.ok(calls.includes('log:Dictionary ZIP: /tmp/anilist-9253.zip'));
|
||||
assert.ok(calls.includes('stopApp'));
|
||||
});
|
||||
|
||||
test('handleCliCommand forwards --dictionary-target to dictionary runtime', async () => {
|
||||
let receivedTarget: string | undefined;
|
||||
const { deps } = createDeps({
|
||||
generateCharacterDictionary: async (targetPath?: string) => {
|
||||
receivedTarget = targetPath;
|
||||
return {
|
||||
zipPath: '/tmp/anilist-100.zip',
|
||||
fromCache: false,
|
||||
mediaId: 100,
|
||||
mediaTitle: 'Test',
|
||||
entryCount: 1,
|
||||
};
|
||||
},
|
||||
});
|
||||
|
||||
handleCliCommand(
|
||||
makeArgs({ dictionary: true, dictionaryTarget: '/tmp/example-video.mkv' }),
|
||||
'initial',
|
||||
deps,
|
||||
);
|
||||
await new Promise((resolve) => setImmediate(resolve));
|
||||
|
||||
assert.equal(receivedTarget, '/tmp/example-video.mkv');
|
||||
});
|
||||
|
||||
test('handleCliCommand does not dispatch runJellyfinCommand for non-Jellyfin commands', () => {
|
||||
const nonJellyfinArgs: Array<Partial<CliArgs>> = [
|
||||
{ start: true },
|
||||
|
||||
@@ -17,6 +17,7 @@ export interface CliCommandServiceDeps {
|
||||
isOverlayRuntimeInitialized: () => boolean;
|
||||
initializeOverlayRuntime: () => void;
|
||||
toggleVisibleOverlay: () => void;
|
||||
openFirstRunSetup: () => void;
|
||||
openYomitanSettingsDelayed: (delayMs: number) => void;
|
||||
setVisibleOverlayVisible: (visible: boolean) => void;
|
||||
copyCurrentSubtitle: () => void;
|
||||
@@ -53,6 +54,13 @@ export interface CliCommandServiceDeps {
|
||||
lastError: string | null;
|
||||
};
|
||||
retryAnilistQueue: () => Promise<{ ok: boolean; message: string }>;
|
||||
generateCharacterDictionary: (targetPath?: string) => Promise<{
|
||||
zipPath: string;
|
||||
fromCache: boolean;
|
||||
mediaId: number;
|
||||
mediaTitle: string;
|
||||
entryCount: number;
|
||||
}>;
|
||||
runJellyfinCommand: (args: CliArgs) => Promise<void>;
|
||||
printHelp: () => void;
|
||||
hasMainWindow: () => boolean;
|
||||
@@ -108,6 +116,7 @@ interface MiningCliRuntime {
|
||||
}
|
||||
|
||||
interface UiCliRuntime {
|
||||
openFirstRunSetup: () => void;
|
||||
openYomitanSettings: () => void;
|
||||
cycleSecondarySubMode: () => void;
|
||||
openRuntimeOptionsPalette: () => void;
|
||||
@@ -134,6 +143,15 @@ export interface CliCommandDepsRuntimeOptions {
|
||||
overlay: OverlayCliRuntime;
|
||||
mining: MiningCliRuntime;
|
||||
anilist: AnilistCliRuntime;
|
||||
dictionary: {
|
||||
generate: (targetPath?: string) => Promise<{
|
||||
zipPath: string;
|
||||
fromCache: boolean;
|
||||
mediaId: number;
|
||||
mediaTitle: string;
|
||||
entryCount: number;
|
||||
}>;
|
||||
};
|
||||
jellyfin: {
|
||||
openSetup: () => void;
|
||||
runCommand: (args: CliArgs) => Promise<void>;
|
||||
@@ -179,6 +197,7 @@ export function createCliCommandDepsRuntime(
|
||||
isOverlayRuntimeInitialized: options.overlay.isInitialized,
|
||||
initializeOverlayRuntime: options.overlay.initialize,
|
||||
toggleVisibleOverlay: options.overlay.toggleVisible,
|
||||
openFirstRunSetup: options.ui.openFirstRunSetup,
|
||||
openYomitanSettingsDelayed: (delayMs) => {
|
||||
options.schedule(() => {
|
||||
options.ui.openYomitanSettings();
|
||||
@@ -202,6 +221,7 @@ export function createCliCommandDepsRuntime(
|
||||
openJellyfinSetup: options.jellyfin.openSetup,
|
||||
getAnilistQueueStatus: options.anilist.getQueueStatus,
|
||||
retryAnilistQueue: options.anilist.retryQueueNow,
|
||||
generateCharacterDictionary: options.dictionary.generate,
|
||||
runJellyfinCommand: options.jellyfin.runCommand,
|
||||
printHelp: options.ui.printHelp,
|
||||
hasMainWindow: options.app.hasMainWindow,
|
||||
@@ -239,51 +259,10 @@ export function handleCliCommand(
|
||||
deps.setLogLevel?.(args.logLevel);
|
||||
}
|
||||
|
||||
const hasNonStartAction =
|
||||
args.stop ||
|
||||
args.toggle ||
|
||||
args.toggleVisibleOverlay ||
|
||||
args.settings ||
|
||||
args.show ||
|
||||
args.hide ||
|
||||
args.showVisibleOverlay ||
|
||||
args.hideVisibleOverlay ||
|
||||
args.copySubtitle ||
|
||||
args.copySubtitleMultiple ||
|
||||
args.mineSentence ||
|
||||
args.mineSentenceMultiple ||
|
||||
args.updateLastCardFromClipboard ||
|
||||
args.refreshKnownWords ||
|
||||
args.toggleSecondarySub ||
|
||||
args.triggerFieldGrouping ||
|
||||
args.triggerSubsync ||
|
||||
args.markAudioCard ||
|
||||
args.openRuntimeOptions ||
|
||||
args.anilistStatus ||
|
||||
args.anilistLogout ||
|
||||
args.anilistSetup ||
|
||||
args.anilistRetryQueue ||
|
||||
args.jellyfin ||
|
||||
args.jellyfinLogin ||
|
||||
args.jellyfinLogout ||
|
||||
args.jellyfinLibraries ||
|
||||
args.jellyfinItems ||
|
||||
args.jellyfinSubtitles ||
|
||||
args.jellyfinPlay ||
|
||||
args.jellyfinRemoteAnnounce ||
|
||||
args.texthooker ||
|
||||
args.help;
|
||||
const ignoreStartOnly =
|
||||
source === 'second-instance' &&
|
||||
args.start &&
|
||||
!hasNonStartAction &&
|
||||
deps.isOverlayRuntimeInitialized();
|
||||
if (ignoreStartOnly) {
|
||||
deps.log('Ignoring --start because SubMiner is already running.');
|
||||
return;
|
||||
}
|
||||
|
||||
const shouldStart = args.start || args.toggle || args.toggleVisibleOverlay;
|
||||
const ignoreSecondInstanceStart =
|
||||
source === 'second-instance' && args.start && deps.isOverlayRuntimeInitialized();
|
||||
const shouldStart =
|
||||
(!ignoreSecondInstanceStart && args.start) || args.toggle || args.toggleVisibleOverlay;
|
||||
const needsOverlayRuntime = commandNeedsOverlayRuntime(args);
|
||||
const shouldInitializeOverlayRuntime = needsOverlayRuntime || args.start;
|
||||
|
||||
@@ -306,6 +285,10 @@ export function handleCliCommand(
|
||||
return;
|
||||
}
|
||||
|
||||
if (ignoreSecondInstanceStart) {
|
||||
deps.log('Ignoring --start because SubMiner is already running.');
|
||||
}
|
||||
|
||||
if (shouldInitializeOverlayRuntime && !deps.isOverlayRuntimeInitialized()) {
|
||||
deps.initializeOverlayRuntime();
|
||||
}
|
||||
@@ -319,6 +302,9 @@ export function handleCliCommand(
|
||||
|
||||
if (args.toggle || args.toggleVisibleOverlay) {
|
||||
deps.toggleVisibleOverlay();
|
||||
} else if (args.setup) {
|
||||
deps.openFirstRunSetup();
|
||||
deps.log('Opened first-run setup flow.');
|
||||
} else if (args.settings) {
|
||||
deps.openYomitanSettingsDelayed(1000);
|
||||
} else if (args.show || args.showVisibleOverlay) {
|
||||
@@ -402,6 +388,29 @@ export function handleCliCommand(
|
||||
} else if (args.jellyfin) {
|
||||
deps.openJellyfinSetup();
|
||||
deps.log('Opened Jellyfin setup flow.');
|
||||
} else if (args.dictionary) {
|
||||
const shouldStopAfterRun = source === 'initial' && !deps.hasMainWindow();
|
||||
deps.log('Generating character dictionary for current anime...');
|
||||
deps
|
||||
.generateCharacterDictionary(args.dictionaryTarget)
|
||||
.then((result) => {
|
||||
const cacheLabel = result.fromCache ? 'cache hit' : 'generated';
|
||||
deps.log(
|
||||
`Character dictionary ${cacheLabel}: AniList ${result.mediaId} (${result.mediaTitle}), entries=${result.entryCount}`,
|
||||
);
|
||||
deps.log(`Dictionary ZIP: ${result.zipPath}`);
|
||||
})
|
||||
.catch((error) => {
|
||||
deps.error('generateCharacterDictionary failed:', error);
|
||||
deps.warn(
|
||||
`Dictionary generation failed: ${error instanceof Error ? error.message : String(error)}`,
|
||||
);
|
||||
})
|
||||
.finally(() => {
|
||||
if (shouldStopAfterRun) {
|
||||
deps.stopApp();
|
||||
}
|
||||
});
|
||||
} else if (args.anilistRetryQueue) {
|
||||
const queueStatus = deps.getAnilistQueueStatus();
|
||||
deps.log(
|
||||
|
||||
@@ -27,6 +27,12 @@ const DatabaseSync: DatabaseSyncCtor | null = (() => {
|
||||
})();
|
||||
const testIfSqlite = DatabaseSync ? test : test.skip;
|
||||
|
||||
if (!DatabaseSync) {
|
||||
console.warn(
|
||||
'Skipping SQLite-backed immersion tracker persistence tests in this runtime; run `bun run test:immersion:sqlite` for real DB coverage.',
|
||||
);
|
||||
}
|
||||
|
||||
let trackerCtor: ImmersionTrackerServiceCtor | null = null;
|
||||
|
||||
async function loadTrackerCtor(): Promise<ImmersionTrackerServiceCtor> {
|
||||
|
||||
@@ -23,6 +23,12 @@ const DatabaseSync: DatabaseSyncCtor | null = (() => {
|
||||
})();
|
||||
const testIfSqlite = DatabaseSync ? test : test.skip;
|
||||
|
||||
if (!DatabaseSync) {
|
||||
console.warn(
|
||||
'Skipping SQLite-backed immersion tracker storage/session tests in this runtime; run `bun run test:immersion:sqlite` for real DB coverage.',
|
||||
);
|
||||
}
|
||||
|
||||
function makeDbPath(): string {
|
||||
const dir = fs.mkdtempSync(path.join(os.tmpdir(), 'subminer-imm-storage-session-'));
|
||||
return path.join(dir, 'immersion.sqlite');
|
||||
|
||||
@@ -315,7 +315,7 @@ export function createTrackerPreparedStatements(db: DatabaseSync): TrackerPrepar
|
||||
lookup_hits, pause_count, pause_ms, seek_forward_count,
|
||||
seek_backward_count, media_buffer_events, CREATED_DATE, LAST_UPDATE_DATE
|
||||
) VALUES (
|
||||
?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?
|
||||
?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?
|
||||
)
|
||||
`),
|
||||
eventInsertStmt: db.prepare(`
|
||||
@@ -323,7 +323,7 @@ export function createTrackerPreparedStatements(db: DatabaseSync): TrackerPrepar
|
||||
session_id, ts_ms, event_type, line_index, segment_start_ms, segment_end_ms,
|
||||
words_delta, cards_delta, payload_json, CREATED_DATE, LAST_UPDATE_DATE
|
||||
) VALUES (
|
||||
?, ?, ?, ?, ?, ?, ?, ?, ?, ?
|
||||
?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?
|
||||
)
|
||||
`),
|
||||
wordUpsertStmt: db.prepare(`
|
||||
|
||||
@@ -30,6 +30,15 @@ export {
|
||||
export { openYomitanSettingsWindow } from './yomitan-settings';
|
||||
export { createTokenizerDepsRuntime, tokenizeSubtitle } from './tokenizer';
|
||||
export { clearYomitanParserCachesForWindow } from './tokenizer/yomitan-parser-runtime';
|
||||
export {
|
||||
deleteYomitanDictionaryByTitle,
|
||||
getYomitanDictionaryInfo,
|
||||
getYomitanSettingsFull,
|
||||
importYomitanDictionaryFromZip,
|
||||
removeYomitanDictionarySettings,
|
||||
setYomitanSettingsFull,
|
||||
upsertYomitanDictionarySettings,
|
||||
} from './tokenizer/yomitan-parser-runtime';
|
||||
export { syncYomitanDefaultAnkiServer } from './tokenizer/yomitan-parser-runtime';
|
||||
export { createSubtitleProcessingController } from './subtitle-processing-controller';
|
||||
export { createFrequencyDictionaryLookup } from './frequency-dictionary';
|
||||
|
||||
@@ -38,6 +38,7 @@ function createOptions(overrides: Partial<Parameters<typeof handleMpvCommandFrom
|
||||
mpvSendCommand: (command) => {
|
||||
sentCommands.push(command);
|
||||
},
|
||||
resolveProxyCommandOsd: async () => null,
|
||||
isMpvConnected: () => true,
|
||||
hasRuntimeOptionsManager: () => true,
|
||||
...overrides,
|
||||
@@ -52,30 +53,39 @@ test('handleMpvCommandFromIpc forwards regular mpv commands', () => {
|
||||
assert.deepEqual(osd, []);
|
||||
});
|
||||
|
||||
test('handleMpvCommandFromIpc emits osd for subtitle position keybinding proxies', () => {
|
||||
test('handleMpvCommandFromIpc emits osd for subtitle position keybinding proxies', async () => {
|
||||
const { options, sentCommands, osd } = createOptions();
|
||||
handleMpvCommandFromIpc(['add', 'sub-pos', 1], options);
|
||||
await new Promise((resolve) => setImmediate(resolve));
|
||||
assert.deepEqual(sentCommands, [['add', 'sub-pos', 1]]);
|
||||
assert.deepEqual(osd, ['Subtitle position: ${sub-pos}']);
|
||||
});
|
||||
|
||||
test('handleMpvCommandFromIpc emits osd for primary subtitle track keybinding proxies', () => {
|
||||
const { options, sentCommands, osd } = createOptions();
|
||||
test('handleMpvCommandFromIpc emits resolved osd for primary subtitle track keybinding proxies', async () => {
|
||||
const { options, sentCommands, osd } = createOptions({
|
||||
resolveProxyCommandOsd: async () => 'Subtitle track: Internal #3 - Japanese (active)',
|
||||
});
|
||||
handleMpvCommandFromIpc(['cycle', 'sid'], options);
|
||||
await new Promise((resolve) => setImmediate(resolve));
|
||||
assert.deepEqual(sentCommands, [['cycle', 'sid']]);
|
||||
assert.deepEqual(osd, ['Subtitle track: ${sid}']);
|
||||
assert.deepEqual(osd, ['Subtitle track: Internal #3 - Japanese (active)']);
|
||||
});
|
||||
|
||||
test('handleMpvCommandFromIpc emits osd for secondary subtitle track keybinding proxies', () => {
|
||||
const { options, sentCommands, osd } = createOptions();
|
||||
test('handleMpvCommandFromIpc emits resolved osd for secondary subtitle track keybinding proxies', async () => {
|
||||
const { options, sentCommands, osd } = createOptions({
|
||||
resolveProxyCommandOsd: async () =>
|
||||
'Secondary subtitle track: External #8 - English Commentary',
|
||||
});
|
||||
handleMpvCommandFromIpc(['set_property', 'secondary-sid', 'auto'], options);
|
||||
await new Promise((resolve) => setImmediate(resolve));
|
||||
assert.deepEqual(sentCommands, [['set_property', 'secondary-sid', 'auto']]);
|
||||
assert.deepEqual(osd, ['Secondary subtitle track: ${secondary-sid}']);
|
||||
assert.deepEqual(osd, ['Secondary subtitle track: External #8 - English Commentary']);
|
||||
});
|
||||
|
||||
test('handleMpvCommandFromIpc emits osd for subtitle delay keybinding proxies', () => {
|
||||
test('handleMpvCommandFromIpc emits osd for subtitle delay keybinding proxies', async () => {
|
||||
const { options, sentCommands, osd } = createOptions();
|
||||
handleMpvCommandFromIpc(['add', 'sub-delay', 0.1], options);
|
||||
await new Promise((resolve) => setImmediate(resolve));
|
||||
assert.deepEqual(sentCommands, [['add', 'sub-delay', 0.1]]);
|
||||
assert.deepEqual(osd, ['Subtitle delay: ${sub-delay}']);
|
||||
});
|
||||
|
||||
@@ -23,6 +23,7 @@ export interface HandleMpvCommandFromIpcOptions {
|
||||
mpvPlayNextSubtitle: () => void;
|
||||
shiftSubDelayToAdjacentSubtitle: (direction: 'next' | 'previous') => Promise<void>;
|
||||
mpvSendCommand: (command: (string | number)[]) => void;
|
||||
resolveProxyCommandOsd?: (command: (string | number)[]) => Promise<string | null>;
|
||||
isMpvConnected: () => boolean;
|
||||
hasRuntimeOptionsManager: () => boolean;
|
||||
}
|
||||
@@ -36,7 +37,7 @@ const MPV_PROPERTY_COMMANDS = new Set([
|
||||
'multiply',
|
||||
]);
|
||||
|
||||
function resolveProxyCommandOsd(command: (string | number)[]): string | null {
|
||||
function resolveProxyCommandOsdTemplate(command: (string | number)[]): string | null {
|
||||
const operation = typeof command[0] === 'string' ? command[0] : '';
|
||||
const property = typeof command[1] === 'string' ? command[1] : '';
|
||||
if (!MPV_PROPERTY_COMMANDS.has(operation)) return null;
|
||||
@@ -55,6 +56,25 @@ function resolveProxyCommandOsd(command: (string | number)[]): string | null {
|
||||
return null;
|
||||
}
|
||||
|
||||
function showResolvedProxyCommandOsd(
|
||||
command: (string | number)[],
|
||||
options: HandleMpvCommandFromIpcOptions,
|
||||
): void {
|
||||
const template = resolveProxyCommandOsdTemplate(command);
|
||||
if (!template) return;
|
||||
|
||||
const emit = async () => {
|
||||
try {
|
||||
const resolved = await options.resolveProxyCommandOsd?.(command);
|
||||
options.showMpvOsd(resolved || template);
|
||||
} catch {
|
||||
options.showMpvOsd(template);
|
||||
}
|
||||
};
|
||||
|
||||
void emit();
|
||||
}
|
||||
|
||||
export function handleMpvCommandFromIpc(
|
||||
command: (string | number)[],
|
||||
options: HandleMpvCommandFromIpcOptions,
|
||||
@@ -103,10 +123,7 @@ export function handleMpvCommandFromIpc(
|
||||
options.mpvPlayNextSubtitle();
|
||||
} else {
|
||||
options.mpvSendCommand(command);
|
||||
const osd = resolveProxyCommandOsd(command);
|
||||
if (osd) {
|
||||
options.showMpvOsd(osd);
|
||||
}
|
||||
showResolvedProxyCommandOsd(command, options);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -22,6 +22,22 @@ test('showMpvOsdRuntime sends show-text when connected', () => {
|
||||
assert.deepEqual(commands, [['show-text', 'hello', '3000']]);
|
||||
});
|
||||
|
||||
test('showMpvOsdRuntime enables property expansion for placeholder-based messages', () => {
|
||||
const commands: (string | number)[][] = [];
|
||||
showMpvOsdRuntime(
|
||||
{
|
||||
connected: true,
|
||||
send: ({ command }) => {
|
||||
commands.push(command);
|
||||
},
|
||||
},
|
||||
'Subtitle delay: ${sub-delay}',
|
||||
);
|
||||
assert.deepEqual(commands, [
|
||||
['expand-properties', 'show-text', 'Subtitle delay: ${sub-delay}', '3000'],
|
||||
]);
|
||||
});
|
||||
|
||||
test('showMpvOsdRuntime logs fallback when disconnected', () => {
|
||||
const logs: string[] = [];
|
||||
showMpvOsdRuntime(
|
||||
|
||||
@@ -453,3 +453,46 @@ test('MpvIpcClient updates current audio stream index from track list', async ()
|
||||
|
||||
assert.equal(client.currentAudioStreamIndex, 11);
|
||||
});
|
||||
|
||||
test('MpvIpcClient playNextSubtitle preserves a manual paused state', async () => {
|
||||
const commands: unknown[] = [];
|
||||
const client = new MpvIpcClient('/tmp/mpv.sock', makeDeps());
|
||||
(client as any).send = (payload: unknown) => {
|
||||
commands.push(payload);
|
||||
return true;
|
||||
};
|
||||
(client as any).pendingPauseAtSubEnd = true;
|
||||
(client as any).pauseAtTime = 42;
|
||||
|
||||
await invokeHandleMessage(client, {
|
||||
event: 'property-change',
|
||||
name: 'pause',
|
||||
data: true,
|
||||
});
|
||||
|
||||
client.playNextSubtitle();
|
||||
|
||||
assert.equal((client as any).pendingPauseAtSubEnd, false);
|
||||
assert.equal((client as any).pauseAtTime, null);
|
||||
assert.deepEqual(commands, [{ command: ['sub-seek', 1] }]);
|
||||
});
|
||||
|
||||
test('MpvIpcClient playNextSubtitle still auto-pauses at end while already playing', async () => {
|
||||
const commands: unknown[] = [];
|
||||
const client = new MpvIpcClient('/tmp/mpv.sock', makeDeps());
|
||||
(client as any).send = (payload: unknown) => {
|
||||
commands.push(payload);
|
||||
return true;
|
||||
};
|
||||
|
||||
await invokeHandleMessage(client, {
|
||||
event: 'property-change',
|
||||
name: 'pause',
|
||||
data: false,
|
||||
});
|
||||
|
||||
client.playNextSubtitle();
|
||||
|
||||
assert.equal((client as any).pendingPauseAtSubEnd, true);
|
||||
assert.deepEqual(commands, [{ command: ['sub-seek', 1] }]);
|
||||
});
|
||||
|
||||
@@ -53,7 +53,10 @@ export function showMpvOsdRuntime(
|
||||
fallbackLog: (text: string) => void = (line) => logger.info(line),
|
||||
): void {
|
||||
if (mpvClient && mpvClient.connected) {
|
||||
mpvClient.send({ command: ['show-text', text, '3000'] });
|
||||
const command = text.includes('${')
|
||||
? ['expand-properties', 'show-text', text, '3000']
|
||||
: ['show-text', text, '3000'];
|
||||
mpvClient.send({ command });
|
||||
return;
|
||||
}
|
||||
fallbackLog(`OSD (MPV not connected): ${text}`);
|
||||
@@ -161,6 +164,7 @@ export class MpvIpcClient implements MpvClient {
|
||||
osdDimensions: null,
|
||||
};
|
||||
private previousSecondarySubVisibility: boolean | null = null;
|
||||
private playbackPaused: boolean | null = null;
|
||||
private pauseAtTime: number | null = null;
|
||||
private pendingPauseAtSubEnd = false;
|
||||
private nextDynamicRequestId = 1000;
|
||||
@@ -207,6 +211,7 @@ export class MpvIpcClient implements MpvClient {
|
||||
this.connected = false;
|
||||
this.connecting = false;
|
||||
this.socket = null;
|
||||
this.playbackPaused = null;
|
||||
this.emit('connection-change', { connected: false });
|
||||
this.failPendingRequests();
|
||||
this.scheduleReconnect();
|
||||
@@ -310,6 +315,7 @@ export class MpvIpcClient implements MpvClient {
|
||||
this.emit('time-pos-change', payload);
|
||||
},
|
||||
emitPauseChange: (payload) => {
|
||||
this.playbackPaused = payload.paused;
|
||||
this.emit('pause-change', payload);
|
||||
},
|
||||
emitSecondarySubtitleChange: (payload) => {
|
||||
@@ -492,6 +498,12 @@ export class MpvIpcClient implements MpvClient {
|
||||
}
|
||||
|
||||
playNextSubtitle(): void {
|
||||
if (this.playbackPaused === true) {
|
||||
this.pendingPauseAtSubEnd = false;
|
||||
this.pauseAtTime = null;
|
||||
this.send({ command: ['sub-seek', 1] });
|
||||
return;
|
||||
}
|
||||
this.pendingPauseAtSubEnd = true;
|
||||
this.send({ command: ['sub-seek', 1] });
|
||||
}
|
||||
|
||||
@@ -11,6 +11,7 @@ function makeArgs(overrides: Partial<CliArgs> = {}): CliArgs {
|
||||
toggle: false,
|
||||
toggleVisibleOverlay: false,
|
||||
settings: false,
|
||||
setup: false,
|
||||
show: false,
|
||||
hide: false,
|
||||
showVisibleOverlay: false,
|
||||
@@ -30,6 +31,7 @@ function makeArgs(overrides: Partial<CliArgs> = {}): CliArgs {
|
||||
anilistLogout: false,
|
||||
anilistSetup: false,
|
||||
anilistRetryQueue: false,
|
||||
dictionary: false,
|
||||
jellyfin: false,
|
||||
jellyfinLogin: false,
|
||||
jellyfinLogout: false,
|
||||
|
||||
@@ -69,6 +69,13 @@ export function runStartupBootstrapRuntime(
|
||||
}
|
||||
|
||||
interface AppReadyConfigLike {
|
||||
annotationWebsocket?: {
|
||||
enabled?: boolean;
|
||||
port?: number;
|
||||
};
|
||||
texthooker?: {
|
||||
launchAtStartup?: boolean;
|
||||
};
|
||||
secondarySub?: {
|
||||
defaultMode?: SecondarySubMode;
|
||||
};
|
||||
@@ -92,6 +99,7 @@ interface AppReadyConfigLike {
|
||||
}
|
||||
|
||||
export interface AppReadyRuntimeDeps {
|
||||
ensureDefaultConfigBootstrap: () => void;
|
||||
loadSubtitlePosition: () => void;
|
||||
resolveKeybindings: () => void;
|
||||
createMpvClient: () => void;
|
||||
@@ -104,18 +112,24 @@ export interface AppReadyRuntimeDeps {
|
||||
setSecondarySubMode: (mode: SecondarySubMode) => void;
|
||||
defaultSecondarySubMode: SecondarySubMode;
|
||||
defaultWebsocketPort: number;
|
||||
defaultAnnotationWebsocketPort: number;
|
||||
defaultTexthookerPort: number;
|
||||
hasMpvWebsocketPlugin: () => boolean;
|
||||
startSubtitleWebsocket: (port: number) => void;
|
||||
startAnnotationWebsocket: (port: number) => void;
|
||||
startTexthooker: (port: number, websocketUrl?: string) => void;
|
||||
log: (message: string) => void;
|
||||
createMecabTokenizerAndCheck: () => Promise<void>;
|
||||
createSubtitleTimingTracker: () => void;
|
||||
createImmersionTracker?: () => void;
|
||||
startJellyfinRemoteSession?: () => Promise<void>;
|
||||
loadYomitanExtension: () => Promise<void>;
|
||||
handleFirstRunSetup: () => Promise<void>;
|
||||
prewarmSubtitleDictionaries?: () => Promise<void>;
|
||||
startBackgroundWarmups: () => void;
|
||||
texthookerOnlyMode: boolean;
|
||||
shouldAutoInitializeOverlayRuntimeFromConfig: () => boolean;
|
||||
setVisibleOverlayVisible: (visible: boolean) => void;
|
||||
initializeOverlayRuntime: () => void;
|
||||
handleInitialArgs: () => void;
|
||||
logDebug?: (message: string) => void;
|
||||
@@ -168,8 +182,10 @@ export function isAutoUpdateEnabledRuntime(
|
||||
export async function runAppReadyRuntime(deps: AppReadyRuntimeDeps): Promise<void> {
|
||||
const now = deps.now ?? (() => Date.now());
|
||||
const startupStartedAtMs = now();
|
||||
deps.ensureDefaultConfigBootstrap();
|
||||
if (deps.shouldSkipHeavyStartup?.()) {
|
||||
await deps.loadYomitanExtension();
|
||||
await deps.handleFirstRunSetup();
|
||||
deps.handleInitialArgs();
|
||||
return;
|
||||
}
|
||||
@@ -178,6 +194,7 @@ export async function runAppReadyRuntime(deps: AppReadyRuntimeDeps): Promise<voi
|
||||
|
||||
if (deps.shouldSkipHeavyStartup?.()) {
|
||||
await deps.loadYomitanExtension();
|
||||
await deps.handleFirstRunSetup();
|
||||
deps.handleInitialArgs();
|
||||
deps.logDebug?.(`App-ready critical path finished in ${now() - startupStartedAtMs}ms.`);
|
||||
return;
|
||||
@@ -209,6 +226,11 @@ export async function runAppReadyRuntime(deps: AppReadyRuntimeDeps): Promise<voi
|
||||
const wsConfig = config.websocket || {};
|
||||
const wsEnabled = wsConfig.enabled ?? 'auto';
|
||||
const wsPort = wsConfig.port || deps.defaultWebsocketPort;
|
||||
const annotationWsConfig = config.annotationWebsocket || {};
|
||||
const annotationWsEnabled = annotationWsConfig.enabled !== false;
|
||||
const annotationWsPort = annotationWsConfig.port || deps.defaultAnnotationWebsocketPort;
|
||||
const texthookerPort = deps.defaultTexthookerPort;
|
||||
let texthookerWebsocketUrl: string | undefined;
|
||||
|
||||
if (wsEnabled === true || (wsEnabled === 'auto' && !deps.hasMpvWebsocketPlugin())) {
|
||||
deps.startSubtitleWebsocket(wsPort);
|
||||
@@ -216,6 +238,17 @@ export async function runAppReadyRuntime(deps: AppReadyRuntimeDeps): Promise<voi
|
||||
deps.log('mpv_websocket detected, skipping built-in WebSocket server');
|
||||
}
|
||||
|
||||
if (annotationWsEnabled) {
|
||||
deps.startAnnotationWebsocket(annotationWsPort);
|
||||
texthookerWebsocketUrl = `ws://127.0.0.1:${annotationWsPort}`;
|
||||
} else if (wsEnabled === true || (wsEnabled === 'auto' && !deps.hasMpvWebsocketPlugin())) {
|
||||
texthookerWebsocketUrl = `ws://127.0.0.1:${wsPort}`;
|
||||
}
|
||||
|
||||
if (config.texthooker?.launchAtStartup !== false) {
|
||||
deps.startTexthooker(texthookerPort, texthookerWebsocketUrl);
|
||||
}
|
||||
|
||||
deps.createSubtitleTimingTracker();
|
||||
if (deps.createImmersionTracker) {
|
||||
deps.log('Runtime ready: immersion tracker startup deferred until first media activity.');
|
||||
@@ -226,11 +259,14 @@ export async function runAppReadyRuntime(deps: AppReadyRuntimeDeps): Promise<voi
|
||||
if (deps.texthookerOnlyMode) {
|
||||
deps.log('Texthooker-only mode enabled; skipping overlay window.');
|
||||
} else if (deps.shouldAutoInitializeOverlayRuntimeFromConfig()) {
|
||||
deps.setVisibleOverlayVisible(true);
|
||||
deps.initializeOverlayRuntime();
|
||||
} else {
|
||||
deps.log('Overlay runtime deferred: waiting for explicit overlay command.');
|
||||
}
|
||||
|
||||
await deps.loadYomitanExtension();
|
||||
await deps.handleFirstRunSetup();
|
||||
deps.handleInitialArgs();
|
||||
deps.logDebug?.(`App-ready critical path finished in ${now() - startupStartedAtMs}ms.`);
|
||||
}
|
||||
|
||||
@@ -276,6 +276,59 @@ test('runSubsyncManual writes deterministic _retimed filename when replace is fa
|
||||
assert.equal(outputPath, path.join(tmpDir, 'episode.ja_retimed.srt'));
|
||||
});
|
||||
|
||||
test('runSubsyncManual reports ffsubsync command failures with details', async () => {
|
||||
const tmpDir = fs.mkdtempSync(path.join(os.tmpdir(), 'subsync-ffsubsync-failure-'));
|
||||
const ffsubsyncPath = path.join(tmpDir, 'ffsubsync.sh');
|
||||
const ffmpegPath = path.join(tmpDir, 'ffmpeg.sh');
|
||||
const alassPath = path.join(tmpDir, 'alass.sh');
|
||||
const videoPath = path.join(tmpDir, 'video.mkv');
|
||||
const primaryPath = path.join(tmpDir, 'primary.srt');
|
||||
|
||||
fs.writeFileSync(videoPath, 'video');
|
||||
fs.writeFileSync(primaryPath, 'sub');
|
||||
writeExecutableScript(ffmpegPath, '#!/bin/sh\nexit 0\n');
|
||||
writeExecutableScript(alassPath, '#!/bin/sh\nexit 0\n');
|
||||
writeExecutableScript(ffsubsyncPath, '#!/bin/sh\necho "reference audio missing" >&2\nexit 1\n');
|
||||
|
||||
const deps = makeDeps({
|
||||
getMpvClient: () => ({
|
||||
connected: true,
|
||||
currentAudioStreamIndex: null,
|
||||
send: () => {},
|
||||
requestProperty: async (name: string) => {
|
||||
if (name === 'path') return videoPath;
|
||||
if (name === 'sid') return 1;
|
||||
if (name === 'secondary-sid') return null;
|
||||
if (name === 'track-list') {
|
||||
return [
|
||||
{
|
||||
id: 1,
|
||||
type: 'sub',
|
||||
selected: true,
|
||||
external: true,
|
||||
'external-filename': primaryPath,
|
||||
},
|
||||
];
|
||||
}
|
||||
return null;
|
||||
},
|
||||
}),
|
||||
getResolvedConfig: () => ({
|
||||
defaultMode: 'manual',
|
||||
alassPath,
|
||||
ffsubsyncPath,
|
||||
ffmpegPath,
|
||||
}),
|
||||
});
|
||||
|
||||
const result = await runSubsyncManual({ engine: 'ffsubsync', sourceTrackId: null }, deps);
|
||||
|
||||
assert.equal(result.ok, false);
|
||||
assert.equal(result.message.startsWith('ffsubsync synchronization failed'), true);
|
||||
assert.match(result.message, /code=1/);
|
||||
assert.match(result.message, /reference audio missing/);
|
||||
});
|
||||
|
||||
test('runSubsyncManual constructs alass command and returns failure on non-zero exit', async () => {
|
||||
const tmpDir = fs.mkdtempSync(path.join(os.tmpdir(), 'subsync-alass-'));
|
||||
const alassLogPath = path.join(tmpDir, 'alass-args.log');
|
||||
|
||||
@@ -1,6 +1,10 @@
|
||||
import test from 'node:test';
|
||||
import assert from 'node:assert/strict';
|
||||
import { serializeSubtitleMarkup, serializeSubtitleWebsocketMessage } from './subtitle-ws';
|
||||
import {
|
||||
serializeInitialSubtitleWebsocketMessage,
|
||||
serializeSubtitleMarkup,
|
||||
serializeSubtitleWebsocketMessage,
|
||||
} from './subtitle-ws';
|
||||
import { PartOfSpeech, type SubtitleData } from '../../types';
|
||||
|
||||
const frequencyOptions = {
|
||||
@@ -78,6 +82,51 @@ test('serializeSubtitleMarkup includes known, n+1, jlpt, and frequency classes',
|
||||
assert.match(markup, /word word-frequency-band-1/);
|
||||
});
|
||||
|
||||
test('serializeSubtitleMarkup preserves tooltip attrs and name-match precedence', () => {
|
||||
const payload: SubtitleData = {
|
||||
text: 'ignored',
|
||||
tokens: [
|
||||
{
|
||||
surface: '無事',
|
||||
reading: 'ぶじ',
|
||||
headword: '無事',
|
||||
startPos: 0,
|
||||
endPos: 2,
|
||||
partOfSpeech: PartOfSpeech.other,
|
||||
isMerged: false,
|
||||
isKnown: true,
|
||||
isNPlusOneTarget: false,
|
||||
jlptLevel: 'N2',
|
||||
frequencyRank: 745,
|
||||
},
|
||||
{
|
||||
surface: 'アレクシア',
|
||||
reading: 'あれくしあ',
|
||||
headword: 'アレクシア',
|
||||
startPos: 2,
|
||||
endPos: 7,
|
||||
partOfSpeech: PartOfSpeech.other,
|
||||
isMerged: false,
|
||||
isKnown: false,
|
||||
isNPlusOneTarget: false,
|
||||
isNameMatch: true,
|
||||
frequencyRank: 12,
|
||||
},
|
||||
],
|
||||
};
|
||||
|
||||
const markup = serializeSubtitleMarkup(payload, frequencyOptions);
|
||||
assert.match(
|
||||
markup,
|
||||
/<span class="word word-known word-jlpt-n2" data-reading="ぶじ" data-headword="無事" data-frequency-rank="745" data-jlpt-level="N2">無事<\/span>/,
|
||||
);
|
||||
assert.match(
|
||||
markup,
|
||||
/<span class="word word-name-match" data-reading="あれくしあ" data-headword="アレクシア" data-frequency-rank="12">アレクシア<\/span>/,
|
||||
);
|
||||
assert.doesNotMatch(markup, /word-name-match word-known|word-known word-name-match/);
|
||||
});
|
||||
|
||||
test('serializeSubtitleWebsocketMessage emits sentence payload', () => {
|
||||
const payload: SubtitleData = {
|
||||
text: '字幕',
|
||||
@@ -85,5 +134,101 @@ test('serializeSubtitleWebsocketMessage emits sentence payload', () => {
|
||||
};
|
||||
|
||||
const raw = serializeSubtitleWebsocketMessage(payload, frequencyOptions);
|
||||
assert.deepEqual(JSON.parse(raw), { sentence: '字幕' });
|
||||
assert.deepEqual(JSON.parse(raw), {
|
||||
version: 1,
|
||||
text: '字幕',
|
||||
sentence: '字幕',
|
||||
tokens: [],
|
||||
});
|
||||
});
|
||||
|
||||
test('serializeSubtitleWebsocketMessage emits structured token api payload', () => {
|
||||
const payload: SubtitleData = {
|
||||
text: '無事',
|
||||
tokens: [
|
||||
{
|
||||
surface: '無事',
|
||||
reading: 'ぶじ',
|
||||
headword: '無事',
|
||||
startPos: 0,
|
||||
endPos: 2,
|
||||
partOfSpeech: PartOfSpeech.other,
|
||||
isMerged: false,
|
||||
isKnown: true,
|
||||
isNPlusOneTarget: false,
|
||||
jlptLevel: 'N2',
|
||||
frequencyRank: 745,
|
||||
},
|
||||
],
|
||||
};
|
||||
|
||||
const raw = serializeSubtitleWebsocketMessage(payload, frequencyOptions);
|
||||
assert.deepEqual(JSON.parse(raw), {
|
||||
version: 1,
|
||||
text: '無事',
|
||||
sentence:
|
||||
'<span class="word word-known word-jlpt-n2" data-reading="ぶじ" data-headword="無事" data-frequency-rank="745" data-jlpt-level="N2">無事</span>',
|
||||
tokens: [
|
||||
{
|
||||
surface: '無事',
|
||||
reading: 'ぶじ',
|
||||
headword: '無事',
|
||||
startPos: 0,
|
||||
endPos: 2,
|
||||
partOfSpeech: PartOfSpeech.other,
|
||||
isMerged: false,
|
||||
isKnown: true,
|
||||
isNPlusOneTarget: false,
|
||||
isNameMatch: false,
|
||||
jlptLevel: 'N2',
|
||||
frequencyRank: 745,
|
||||
className: 'word word-known word-jlpt-n2',
|
||||
frequencyRankLabel: '745',
|
||||
jlptLevelLabel: 'N2',
|
||||
},
|
||||
],
|
||||
});
|
||||
});
|
||||
|
||||
test('serializeInitialSubtitleWebsocketMessage keeps annotated current subtitle content', () => {
|
||||
const payload: SubtitleData = {
|
||||
text: 'ignored fallback',
|
||||
tokens: [
|
||||
{
|
||||
surface: '既知',
|
||||
reading: '',
|
||||
headword: '',
|
||||
startPos: 0,
|
||||
endPos: 2,
|
||||
partOfSpeech: PartOfSpeech.other,
|
||||
isMerged: false,
|
||||
isKnown: true,
|
||||
isNPlusOneTarget: false,
|
||||
},
|
||||
],
|
||||
};
|
||||
|
||||
const raw = serializeInitialSubtitleWebsocketMessage(payload, frequencyOptions);
|
||||
assert.deepEqual(JSON.parse(raw ?? ''), {
|
||||
version: 1,
|
||||
text: 'ignored fallback',
|
||||
sentence: '<span class="word word-known">既知</span>',
|
||||
tokens: [
|
||||
{
|
||||
surface: '既知',
|
||||
reading: '',
|
||||
headword: '',
|
||||
startPos: 0,
|
||||
endPos: 2,
|
||||
partOfSpeech: PartOfSpeech.other,
|
||||
isMerged: false,
|
||||
isKnown: true,
|
||||
isNPlusOneTarget: false,
|
||||
isNameMatch: false,
|
||||
className: 'word word-known',
|
||||
frequencyRankLabel: null,
|
||||
jlptLevelLabel: null,
|
||||
},
|
||||
],
|
||||
});
|
||||
});
|
||||
|
||||
@@ -18,6 +18,26 @@ export type SubtitleWebsocketFrequencyOptions = {
|
||||
mode: 'single' | 'banded';
|
||||
};
|
||||
|
||||
type SerializedSubtitleToken = Pick<
|
||||
MergedToken,
|
||||
| 'surface'
|
||||
| 'reading'
|
||||
| 'headword'
|
||||
| 'startPos'
|
||||
| 'endPos'
|
||||
| 'partOfSpeech'
|
||||
| 'isMerged'
|
||||
| 'isKnown'
|
||||
| 'isNPlusOneTarget'
|
||||
| 'frequencyRank'
|
||||
| 'jlptLevel'
|
||||
> & {
|
||||
isNameMatch: boolean;
|
||||
className: string;
|
||||
frequencyRankLabel: string | null;
|
||||
jlptLevelLabel: string | null;
|
||||
};
|
||||
|
||||
function escapeHtml(text: string): string {
|
||||
return text
|
||||
.replaceAll('&', '&')
|
||||
@@ -46,11 +66,29 @@ function computeFrequencyClass(
|
||||
return 'word-frequency-single';
|
||||
}
|
||||
|
||||
function getFrequencyRankLabel(
|
||||
token: MergedToken,
|
||||
options: SubtitleWebsocketFrequencyOptions,
|
||||
): string | null {
|
||||
if (!options.enabled) return null;
|
||||
if (typeof token.frequencyRank !== 'number' || !Number.isFinite(token.frequencyRank)) return null;
|
||||
|
||||
const rank = Math.max(1, Math.floor(token.frequencyRank));
|
||||
const topX = Math.max(1, Math.floor(options.topX));
|
||||
return rank <= topX ? String(rank) : null;
|
||||
}
|
||||
|
||||
function getJlptLevelLabel(token: MergedToken): string | null {
|
||||
return token.jlptLevel ?? null;
|
||||
}
|
||||
|
||||
function computeWordClass(token: MergedToken, options: SubtitleWebsocketFrequencyOptions): string {
|
||||
const classes = ['word'];
|
||||
|
||||
if (token.isNPlusOneTarget) {
|
||||
classes.push('word-n-plus-one');
|
||||
} else if (token.isNameMatch) {
|
||||
classes.push('word-name-match');
|
||||
} else if (token.isKnown) {
|
||||
classes.push('word-known');
|
||||
}
|
||||
@@ -59,7 +97,7 @@ function computeWordClass(token: MergedToken, options: SubtitleWebsocketFrequenc
|
||||
classes.push(`word-jlpt-${token.jlptLevel.toLowerCase()}`);
|
||||
}
|
||||
|
||||
if (!token.isKnown && !token.isNPlusOneTarget) {
|
||||
if (!token.isKnown && !token.isNPlusOneTarget && !token.isNameMatch) {
|
||||
const frequencyClass = computeFrequencyClass(token, options);
|
||||
if (frequencyClass) {
|
||||
classes.push(frequencyClass);
|
||||
@@ -69,6 +107,55 @@ function computeWordClass(token: MergedToken, options: SubtitleWebsocketFrequenc
|
||||
return classes.join(' ');
|
||||
}
|
||||
|
||||
function serializeWordDataAttributes(
|
||||
token: MergedToken,
|
||||
options: SubtitleWebsocketFrequencyOptions,
|
||||
): string {
|
||||
const attributes: string[] = [];
|
||||
|
||||
if (token.reading) {
|
||||
attributes.push(`data-reading="${escapeHtml(token.reading)}"`);
|
||||
}
|
||||
if (token.headword) {
|
||||
attributes.push(`data-headword="${escapeHtml(token.headword)}"`);
|
||||
}
|
||||
|
||||
const frequencyRankLabel = getFrequencyRankLabel(token, options);
|
||||
if (frequencyRankLabel) {
|
||||
attributes.push(`data-frequency-rank="${escapeHtml(frequencyRankLabel)}"`);
|
||||
}
|
||||
|
||||
const jlptLevelLabel = getJlptLevelLabel(token);
|
||||
if (jlptLevelLabel) {
|
||||
attributes.push(`data-jlpt-level="${escapeHtml(jlptLevelLabel)}"`);
|
||||
}
|
||||
|
||||
return attributes.length > 0 ? ` ${attributes.join(' ')}` : '';
|
||||
}
|
||||
|
||||
function serializeSubtitleToken(
|
||||
token: MergedToken,
|
||||
options: SubtitleWebsocketFrequencyOptions,
|
||||
): SerializedSubtitleToken {
|
||||
return {
|
||||
surface: token.surface,
|
||||
reading: token.reading,
|
||||
headword: token.headword,
|
||||
startPos: token.startPos,
|
||||
endPos: token.endPos,
|
||||
partOfSpeech: token.partOfSpeech,
|
||||
isMerged: token.isMerged,
|
||||
isKnown: token.isKnown,
|
||||
isNPlusOneTarget: token.isNPlusOneTarget,
|
||||
isNameMatch: token.isNameMatch ?? false,
|
||||
jlptLevel: token.jlptLevel,
|
||||
frequencyRank: token.frequencyRank,
|
||||
className: computeWordClass(token, options),
|
||||
frequencyRankLabel: getFrequencyRankLabel(token, options),
|
||||
jlptLevelLabel: getJlptLevelLabel(token),
|
||||
};
|
||||
}
|
||||
|
||||
export function serializeSubtitleMarkup(
|
||||
payload: SubtitleData,
|
||||
options: SubtitleWebsocketFrequencyOptions,
|
||||
@@ -80,11 +167,12 @@ export function serializeSubtitleMarkup(
|
||||
const chunks: string[] = [];
|
||||
for (const token of payload.tokens) {
|
||||
const klass = computeWordClass(token, options);
|
||||
const attrs = serializeWordDataAttributes(token, options);
|
||||
const parts = token.surface.split('\n');
|
||||
for (let index = 0; index < parts.length; index += 1) {
|
||||
const part = parts[index];
|
||||
if (part) {
|
||||
chunks.push(`<span class="${klass}">${escapeHtml(part)}</span>`);
|
||||
chunks.push(`<span class="${klass}"${attrs}>${escapeHtml(part)}</span>`);
|
||||
}
|
||||
if (index < parts.length - 1) {
|
||||
chunks.push('<br>');
|
||||
@@ -99,7 +187,23 @@ export function serializeSubtitleWebsocketMessage(
|
||||
payload: SubtitleData,
|
||||
options: SubtitleWebsocketFrequencyOptions,
|
||||
): string {
|
||||
return JSON.stringify({ sentence: serializeSubtitleMarkup(payload, options) });
|
||||
return JSON.stringify({
|
||||
version: 1,
|
||||
text: payload.text,
|
||||
sentence: serializeSubtitleMarkup(payload, options),
|
||||
tokens: payload.tokens?.map((token) => serializeSubtitleToken(token, options)) ?? [],
|
||||
});
|
||||
}
|
||||
|
||||
export function serializeInitialSubtitleWebsocketMessage(
|
||||
payload: SubtitleData | null,
|
||||
options: SubtitleWebsocketFrequencyOptions,
|
||||
): string | null {
|
||||
if (!payload || !payload.text.trim()) {
|
||||
return null;
|
||||
}
|
||||
|
||||
return serializeSubtitleWebsocketMessage(payload, options);
|
||||
}
|
||||
|
||||
export class SubtitleWebSocket {
|
||||
@@ -114,7 +218,11 @@ export class SubtitleWebSocket {
|
||||
return (this.server?.clients.size ?? 0) > 0;
|
||||
}
|
||||
|
||||
public start(port: number, getCurrentSubtitleText: () => string): void {
|
||||
public start(
|
||||
port: number,
|
||||
getCurrentSubtitleData: () => SubtitleData | null,
|
||||
getFrequencyOptions: () => SubtitleWebsocketFrequencyOptions,
|
||||
): void {
|
||||
this.server = new WebSocket.Server({ port, host: '127.0.0.1' });
|
||||
|
||||
this.server.on('connection', (ws: WebSocket) => {
|
||||
@@ -124,9 +232,12 @@ export class SubtitleWebSocket {
|
||||
return;
|
||||
}
|
||||
|
||||
const currentText = getCurrentSubtitleText();
|
||||
if (currentText) {
|
||||
ws.send(JSON.stringify({ sentence: currentText }));
|
||||
const currentMessage = serializeInitialSubtitleWebsocketMessage(
|
||||
getCurrentSubtitleData(),
|
||||
getFrequencyOptions(),
|
||||
);
|
||||
if (currentMessage) {
|
||||
ws.send(currentMessage);
|
||||
}
|
||||
});
|
||||
|
||||
|
||||
27
src/core/services/texthooker.test.ts
Normal file
27
src/core/services/texthooker.test.ts
Normal file
@@ -0,0 +1,27 @@
|
||||
import assert from 'node:assert/strict';
|
||||
import test from 'node:test';
|
||||
import { injectTexthookerBootstrapHtml } from './texthooker';
|
||||
|
||||
test('injectTexthookerBootstrapHtml injects websocket bootstrap before head close', () => {
|
||||
const html = '<html><head><title>Texthooker</title></head><body></body></html>';
|
||||
|
||||
const actual = injectTexthookerBootstrapHtml(html, 'ws://127.0.0.1:6678');
|
||||
|
||||
assert.match(
|
||||
actual,
|
||||
/window\.localStorage\.setItem\('bannou-texthooker-websocketUrl', "ws:\/\/127\.0\.0\.1:6678"\)/,
|
||||
);
|
||||
assert.ok(actual.indexOf('</script></head>') !== -1);
|
||||
assert.ok(actual.includes('bannou-texthooker-websocketUrl'));
|
||||
assert.ok(!actual.includes('bannou-texthooker-enableKnownWordColoring'));
|
||||
assert.ok(!actual.includes('bannou-texthooker-enableNPlusOneColoring'));
|
||||
assert.ok(!actual.includes('bannou-texthooker-enableNameMatchColoring'));
|
||||
assert.ok(!actual.includes('bannou-texthooker-enableFrequencyColoring'));
|
||||
assert.ok(!actual.includes('bannou-texthooker-enableJlptColoring'));
|
||||
});
|
||||
|
||||
test('injectTexthookerBootstrapHtml leaves html unchanged without websocketUrl', () => {
|
||||
const html = '<html><head></head><body></body></html>';
|
||||
|
||||
assert.equal(injectTexthookerBootstrapHtml(html), html);
|
||||
});
|
||||
@@ -5,6 +5,22 @@ import { createLogger } from '../../logger';
|
||||
|
||||
const logger = createLogger('main:texthooker');
|
||||
|
||||
export function injectTexthookerBootstrapHtml(html: string, websocketUrl?: string): string {
|
||||
if (!websocketUrl) {
|
||||
return html;
|
||||
}
|
||||
|
||||
const bootstrapScript = `<script>window.localStorage.setItem('bannou-texthooker-websocketUrl', ${JSON.stringify(
|
||||
websocketUrl,
|
||||
)});</script>`;
|
||||
|
||||
if (html.includes('</head>')) {
|
||||
return html.replace('</head>', `${bootstrapScript}</head>`);
|
||||
}
|
||||
|
||||
return `${bootstrapScript}${html}`;
|
||||
}
|
||||
|
||||
export class Texthooker {
|
||||
private server: http.Server | null = null;
|
||||
|
||||
@@ -12,7 +28,11 @@ export class Texthooker {
|
||||
return this.server !== null;
|
||||
}
|
||||
|
||||
public start(port: number): http.Server | null {
|
||||
public start(port: number, websocketUrl?: string): http.Server | null {
|
||||
if (this.server) {
|
||||
return this.server;
|
||||
}
|
||||
|
||||
const texthookerPath = this.getTexthookerPath();
|
||||
if (!texthookerPath) {
|
||||
logger.error('texthooker-ui not found');
|
||||
@@ -42,8 +62,12 @@ export class Texthooker {
|
||||
res.end('Not found');
|
||||
return;
|
||||
}
|
||||
const responseData =
|
||||
urlPath === '/' || urlPath === '/index.html'
|
||||
? Buffer.from(injectTexthookerBootstrapHtml(data.toString('utf-8'), websocketUrl))
|
||||
: data;
|
||||
res.writeHead(200, { 'Content-Type': mimeTypes[ext] || 'text/plain' });
|
||||
res.end(data);
|
||||
res.end(responseData);
|
||||
});
|
||||
});
|
||||
|
||||
|
||||
@@ -24,31 +24,40 @@ interface YomitanTokenInput {
|
||||
surface: string;
|
||||
reading?: string;
|
||||
headword?: string;
|
||||
isNameMatch?: boolean;
|
||||
}
|
||||
|
||||
function makeDepsFromYomitanTokens(
|
||||
tokens: YomitanTokenInput[],
|
||||
overrides: Partial<TokenizerServiceDeps> = {},
|
||||
): TokenizerServiceDeps {
|
||||
let cursor = 0;
|
||||
return makeDeps({
|
||||
getYomitanExt: () => ({ id: 'dummy-ext' }) as any,
|
||||
getYomitanParserWindow: () =>
|
||||
({
|
||||
isDestroyed: () => false,
|
||||
webContents: {
|
||||
executeJavaScript: async () => [
|
||||
{
|
||||
source: 'scanning-parser',
|
||||
index: 0,
|
||||
content: tokens.map((token) => [
|
||||
{
|
||||
text: token.surface,
|
||||
reading: token.reading ?? token.surface,
|
||||
headwords: [[{ term: token.headword ?? token.surface }]],
|
||||
},
|
||||
]),
|
||||
},
|
||||
],
|
||||
executeJavaScript: async (script: string) => {
|
||||
if (script.includes('getTermFrequencies')) {
|
||||
return [];
|
||||
}
|
||||
|
||||
cursor = 0;
|
||||
return tokens.map((token) => {
|
||||
const startPos = cursor;
|
||||
const endPos = startPos + token.surface.length;
|
||||
cursor = endPos;
|
||||
return {
|
||||
surface: token.surface,
|
||||
reading: token.reading ?? token.surface,
|
||||
headword: token.headword ?? token.surface,
|
||||
startPos,
|
||||
endPos,
|
||||
isNameMatch: token.isNameMatch ?? false,
|
||||
};
|
||||
});
|
||||
},
|
||||
},
|
||||
}) as unknown as Electron.BrowserWindow,
|
||||
...overrides,
|
||||
@@ -108,6 +117,20 @@ test('tokenizeSubtitle assigns JLPT level to parsed Yomitan tokens', async () =>
|
||||
assert.equal(result.tokens?.[0]?.jlptLevel, 'N5');
|
||||
});
|
||||
|
||||
test('tokenizeSubtitle preserves Yomitan name-match metadata on tokens', async () => {
|
||||
const result = await tokenizeSubtitle(
|
||||
'アクアです',
|
||||
makeDepsFromYomitanTokens([
|
||||
{ surface: 'アクア', reading: 'あくあ', headword: 'アクア', isNameMatch: true },
|
||||
{ surface: 'です', reading: 'です', headword: 'です' },
|
||||
]),
|
||||
);
|
||||
|
||||
assert.equal(result.tokens?.length, 2);
|
||||
assert.equal((result.tokens?.[0] as { isNameMatch?: boolean } | undefined)?.isNameMatch, true);
|
||||
assert.equal((result.tokens?.[1] as { isNameMatch?: boolean } | undefined)?.isNameMatch, false);
|
||||
});
|
||||
|
||||
test('tokenizeSubtitle caches JLPT lookups across repeated tokens', async () => {
|
||||
let lookupCalls = 0;
|
||||
const result = await tokenizeSubtitle(
|
||||
@@ -182,6 +205,69 @@ test('tokenizeSubtitle applies frequency dictionary ranks', async () => {
|
||||
assert.equal(result.tokens?.[1]?.frequencyRank, 1200);
|
||||
});
|
||||
|
||||
test('tokenizeSubtitle uses left-to-right yomitan scanning to keep full katakana name tokens', async () => {
|
||||
const result = await tokenizeSubtitle(
|
||||
'カズマ 魔王軍',
|
||||
makeDeps({
|
||||
getYomitanExt: () => ({ id: 'dummy-ext' }) as any,
|
||||
getYomitanParserWindow: () =>
|
||||
({
|
||||
isDestroyed: () => false,
|
||||
webContents: {
|
||||
executeJavaScript: async (script: string) => {
|
||||
if (script.includes('getTermFrequencies')) {
|
||||
return [];
|
||||
}
|
||||
|
||||
return [
|
||||
{
|
||||
surface: 'カズマ',
|
||||
reading: 'かずま',
|
||||
headword: 'カズマ',
|
||||
startPos: 0,
|
||||
endPos: 3,
|
||||
},
|
||||
{
|
||||
surface: '魔王軍',
|
||||
reading: 'まおうぐん',
|
||||
headword: '魔王軍',
|
||||
startPos: 4,
|
||||
endPos: 7,
|
||||
},
|
||||
];
|
||||
},
|
||||
},
|
||||
}) as unknown as Electron.BrowserWindow,
|
||||
}),
|
||||
);
|
||||
|
||||
assert.deepEqual(
|
||||
result.tokens?.map((token) => ({
|
||||
surface: token.surface,
|
||||
reading: token.reading,
|
||||
headword: token.headword,
|
||||
startPos: token.startPos,
|
||||
endPos: token.endPos,
|
||||
})),
|
||||
[
|
||||
{
|
||||
surface: 'カズマ',
|
||||
reading: 'かずま',
|
||||
headword: 'カズマ',
|
||||
startPos: 0,
|
||||
endPos: 3,
|
||||
},
|
||||
{
|
||||
surface: '魔王軍',
|
||||
reading: 'まおうぐん',
|
||||
headword: '魔王軍',
|
||||
startPos: 4,
|
||||
endPos: 7,
|
||||
},
|
||||
],
|
||||
);
|
||||
});
|
||||
|
||||
test('tokenizeSubtitle loads frequency ranks from Yomitan installed dictionaries', async () => {
|
||||
const result = await tokenizeSubtitle(
|
||||
'猫',
|
||||
@@ -1165,6 +1251,30 @@ test('tokenizeSubtitle normalizes newlines before Yomitan parse request', async
|
||||
assert.equal(result.tokens, null);
|
||||
});
|
||||
|
||||
test('tokenizeSubtitle collapses zero-width separators before Yomitan parse request', async () => {
|
||||
let parseInput = '';
|
||||
const result = await tokenizeSubtitle(
|
||||
'キリキリと\u200bかかってこい\nこのヘナチョコ冒険者どもめが!',
|
||||
makeDeps({
|
||||
getYomitanExt: () => ({ id: 'dummy-ext' }) as any,
|
||||
getYomitanParserWindow: () =>
|
||||
({
|
||||
isDestroyed: () => false,
|
||||
webContents: {
|
||||
executeJavaScript: async (script: string) => {
|
||||
parseInput = script;
|
||||
return null;
|
||||
},
|
||||
},
|
||||
}) as unknown as Electron.BrowserWindow,
|
||||
}),
|
||||
);
|
||||
|
||||
assert.match(parseInput, /キリキリと かかってこい このヘナチョコ冒険者どもめが!/);
|
||||
assert.equal(result.text, 'キリキリと\u200bかかってこい\nこのヘナチョコ冒険者どもめが!');
|
||||
assert.equal(result.tokens, null);
|
||||
});
|
||||
|
||||
test('tokenizeSubtitle returns null tokens when Yomitan parsing is unavailable', async () => {
|
||||
const result = await tokenizeSubtitle('猫です', makeDeps());
|
||||
|
||||
@@ -1751,9 +1861,9 @@ test('tokenizeSubtitle keeps parsing explicit by scanning-parser source only', a
|
||||
assert.equal(result.tokens?.[4]?.frequencyRank, 1500);
|
||||
});
|
||||
|
||||
test('tokenizeSubtitle still assigns frequency to non-known Yomitan tokens', async () => {
|
||||
test('tokenizeSubtitle still assigns frequency to non-known multi-character Yomitan tokens', async () => {
|
||||
const result = await tokenizeSubtitle(
|
||||
'小園に',
|
||||
'小園友達',
|
||||
makeDeps({
|
||||
getYomitanExt: () => ({ id: 'dummy-ext' }) as any,
|
||||
getYomitanParserWindow: () =>
|
||||
@@ -1774,9 +1884,9 @@ test('tokenizeSubtitle still assigns frequency to non-known Yomitan tokens', asy
|
||||
],
|
||||
[
|
||||
{
|
||||
text: 'に',
|
||||
reading: 'に',
|
||||
headwords: [[{ term: 'に' }]],
|
||||
text: '友達',
|
||||
reading: 'ともだち',
|
||||
headwords: [[{ term: '友達' }]],
|
||||
},
|
||||
],
|
||||
],
|
||||
@@ -1785,7 +1895,7 @@ test('tokenizeSubtitle still assigns frequency to non-known Yomitan tokens', asy
|
||||
},
|
||||
}) as unknown as Electron.BrowserWindow,
|
||||
getFrequencyDictionaryEnabled: () => true,
|
||||
getFrequencyRank: (text) => (text === '小園' ? 75 : text === 'に' ? 3000 : null),
|
||||
getFrequencyRank: (text) => (text === '小園' ? 75 : text === '友達' ? 3000 : null),
|
||||
isKnownWord: (text) => text === '小園',
|
||||
}),
|
||||
);
|
||||
@@ -2525,6 +2635,21 @@ test('tokenizeSubtitle excludes default non-independent pos2 from N+1 and freque
|
||||
assert.equal(result.tokens?.[0]?.isNPlusOneTarget, false);
|
||||
});
|
||||
|
||||
test('tokenizeSubtitle excludes single-kana merged tokens from frequency highlighting', async () => {
|
||||
const result = await tokenizeSubtitle(
|
||||
'た',
|
||||
makeDepsFromYomitanTokens([{ surface: 'た', reading: 'た', headword: 'た' }], {
|
||||
getFrequencyDictionaryEnabled: () => true,
|
||||
getFrequencyRank: (text) => (text === 'た' ? 17 : null),
|
||||
getMinSentenceWordsForNPlusOne: () => 1,
|
||||
tokenizeWithMecab: async () => null,
|
||||
}),
|
||||
);
|
||||
|
||||
assert.equal(result.tokens?.length, 1);
|
||||
assert.equal(result.tokens?.[0]?.frequencyRank, undefined);
|
||||
});
|
||||
|
||||
test('tokenizeSubtitle excludes merged function/content token from frequency highlighting but keeps N+1', async () => {
|
||||
const result = await tokenizeSubtitle(
|
||||
'になれば',
|
||||
|
||||
@@ -9,6 +9,7 @@ import {
|
||||
Token,
|
||||
FrequencyDictionaryLookup,
|
||||
JlptLevel,
|
||||
PartOfSpeech,
|
||||
} from '../../types';
|
||||
import {
|
||||
DEFAULT_ANNOTATION_POS1_EXCLUSION_CONFIG,
|
||||
@@ -18,9 +19,8 @@ import {
|
||||
DEFAULT_ANNOTATION_POS2_EXCLUSION_CONFIG,
|
||||
resolveAnnotationPos2ExclusionSet,
|
||||
} from '../../token-pos2-exclusions';
|
||||
import { selectYomitanParseTokens } from './tokenizer/parser-selection-stage';
|
||||
import {
|
||||
requestYomitanParseResults,
|
||||
requestYomitanScanTokens,
|
||||
requestYomitanTermFrequencies,
|
||||
} from './tokenizer/yomitan-parser-runtime';
|
||||
|
||||
@@ -44,6 +44,7 @@ export interface TokenizerServiceDeps {
|
||||
getJlptLevel: (text: string) => JlptLevel | null;
|
||||
getNPlusOneEnabled?: () => boolean;
|
||||
getJlptEnabled?: () => boolean;
|
||||
getNameMatchEnabled?: () => boolean;
|
||||
getFrequencyDictionaryEnabled?: () => boolean;
|
||||
getFrequencyDictionaryMatchMode?: () => FrequencyDictionaryMatchMode;
|
||||
getFrequencyRank?: FrequencyDictionaryLookup;
|
||||
@@ -73,6 +74,7 @@ export interface TokenizerDepsRuntimeOptions {
|
||||
getJlptLevel: (text: string) => JlptLevel | null;
|
||||
getNPlusOneEnabled?: () => boolean;
|
||||
getJlptEnabled?: () => boolean;
|
||||
getNameMatchEnabled?: () => boolean;
|
||||
getFrequencyDictionaryEnabled?: () => boolean;
|
||||
getFrequencyDictionaryMatchMode?: () => FrequencyDictionaryMatchMode;
|
||||
getFrequencyRank?: FrequencyDictionaryLookup;
|
||||
@@ -85,6 +87,7 @@ export interface TokenizerDepsRuntimeOptions {
|
||||
interface TokenizerAnnotationOptions {
|
||||
nPlusOneEnabled: boolean;
|
||||
jlptEnabled: boolean;
|
||||
nameMatchEnabled: boolean;
|
||||
frequencyEnabled: boolean;
|
||||
frequencyMatchMode: FrequencyDictionaryMatchMode;
|
||||
minSentenceWordsForNPlusOne: number | undefined;
|
||||
@@ -106,6 +109,7 @@ const DEFAULT_ANNOTATION_POS1_EXCLUSIONS = resolveAnnotationPos1ExclusionSet(
|
||||
const DEFAULT_ANNOTATION_POS2_EXCLUSIONS = resolveAnnotationPos2ExclusionSet(
|
||||
DEFAULT_ANNOTATION_POS2_EXCLUSION_CONFIG,
|
||||
);
|
||||
const INVISIBLE_SEPARATOR_PATTERN = /[\u200b\u2060\ufeff]/g;
|
||||
|
||||
function getKnownWordLookup(
|
||||
deps: TokenizerServiceDeps,
|
||||
@@ -189,6 +193,7 @@ export function createTokenizerDepsRuntime(
|
||||
getJlptLevel: options.getJlptLevel,
|
||||
getNPlusOneEnabled: options.getNPlusOneEnabled,
|
||||
getJlptEnabled: options.getJlptEnabled,
|
||||
getNameMatchEnabled: options.getNameMatchEnabled,
|
||||
getFrequencyDictionaryEnabled: options.getFrequencyDictionaryEnabled,
|
||||
getFrequencyDictionaryMatchMode: options.getFrequencyDictionaryMatchMode ?? (() => 'headword'),
|
||||
getFrequencyRank: options.getFrequencyRank,
|
||||
@@ -263,6 +268,7 @@ function isKanaChar(char: string): boolean {
|
||||
return (
|
||||
(code >= 0x3041 && code <= 0x3096) ||
|
||||
(code >= 0x309b && code <= 0x309f) ||
|
||||
code === 0x30fc ||
|
||||
(code >= 0x30a0 && code <= 0x30fa) ||
|
||||
(code >= 0x30fd && code <= 0x30ff)
|
||||
);
|
||||
@@ -295,6 +301,11 @@ function normalizeYomitanMergedReading(token: MergedToken): string {
|
||||
function normalizeSelectedYomitanTokens(tokens: MergedToken[]): MergedToken[] {
|
||||
return tokens.map((token) => ({
|
||||
...token,
|
||||
partOfSpeech: token.partOfSpeech ?? PartOfSpeech.other,
|
||||
isMerged: token.isMerged ?? true,
|
||||
isKnown: token.isKnown ?? false,
|
||||
isNPlusOneTarget: token.isNPlusOneTarget ?? false,
|
||||
isNameMatch: token.isNameMatch ?? false,
|
||||
reading: normalizeYomitanMergedReading(token),
|
||||
}));
|
||||
}
|
||||
@@ -454,6 +465,7 @@ function getAnnotationOptions(deps: TokenizerServiceDeps): TokenizerAnnotationOp
|
||||
return {
|
||||
nPlusOneEnabled: deps.getNPlusOneEnabled?.() !== false,
|
||||
jlptEnabled: deps.getJlptEnabled?.() !== false,
|
||||
nameMatchEnabled: deps.getNameMatchEnabled?.() !== false,
|
||||
frequencyEnabled: deps.getFrequencyDictionaryEnabled?.() !== false,
|
||||
frequencyMatchMode: deps.getFrequencyDictionaryMatchMode?.() ?? 'headword',
|
||||
minSentenceWordsForNPlusOne: deps.getMinSentenceWordsForNPlusOne?.(),
|
||||
@@ -467,20 +479,28 @@ async function parseWithYomitanInternalParser(
|
||||
deps: TokenizerServiceDeps,
|
||||
options: TokenizerAnnotationOptions,
|
||||
): Promise<MergedToken[] | null> {
|
||||
const parseResults = await requestYomitanParseResults(text, deps, logger);
|
||||
if (!parseResults) {
|
||||
return null;
|
||||
}
|
||||
|
||||
const selectedTokens = selectYomitanParseTokens(
|
||||
parseResults,
|
||||
getKnownWordLookup(deps, options),
|
||||
deps.getKnownWordMatchMode(),
|
||||
);
|
||||
const selectedTokens = await requestYomitanScanTokens(text, deps, logger, {
|
||||
includeNameMatchMetadata: options.nameMatchEnabled,
|
||||
});
|
||||
if (!selectedTokens || selectedTokens.length === 0) {
|
||||
return null;
|
||||
}
|
||||
const normalizedSelectedTokens = normalizeSelectedYomitanTokens(selectedTokens);
|
||||
const normalizedSelectedTokens = normalizeSelectedYomitanTokens(
|
||||
selectedTokens.map(
|
||||
(token): MergedToken => ({
|
||||
surface: token.surface,
|
||||
reading: token.reading,
|
||||
headword: token.headword,
|
||||
startPos: token.startPos,
|
||||
endPos: token.endPos,
|
||||
partOfSpeech: PartOfSpeech.other,
|
||||
isMerged: true,
|
||||
isKnown: false,
|
||||
isNPlusOneTarget: false,
|
||||
isNameMatch: token.isNameMatch ?? false,
|
||||
}),
|
||||
),
|
||||
);
|
||||
|
||||
if (deps.getYomitanGroupDebugEnabled?.() === true) {
|
||||
logSelectedYomitanGroups(text, normalizedSelectedTokens);
|
||||
@@ -553,7 +573,11 @@ export async function tokenizeSubtitle(
|
||||
return { text, tokens: null };
|
||||
}
|
||||
|
||||
const tokenizeText = displayText.replace(/\n/g, ' ').replace(/\s+/g, ' ').trim();
|
||||
const tokenizeText = displayText
|
||||
.replace(INVISIBLE_SEPARATOR_PATTERN, ' ')
|
||||
.replace(/\n/g, ' ')
|
||||
.replace(/\s+/g, ' ')
|
||||
.trim();
|
||||
const annotationOptions = getAnnotationOptions(deps);
|
||||
|
||||
const yomitanTokens = await parseWithYomitanInternalParser(tokenizeText, deps, annotationOptions);
|
||||
|
||||
@@ -252,12 +252,12 @@ test('annotateTokens applies configured pos1 exclusions to both frequency and N+
|
||||
test('annotateTokens allows previously default-excluded pos1 when removed from effective set', () => {
|
||||
const tokens = [
|
||||
makeToken({
|
||||
surface: 'は',
|
||||
headword: 'は',
|
||||
surface: 'まで',
|
||||
headword: 'まで',
|
||||
partOfSpeech: PartOfSpeech.other,
|
||||
pos1: '助詞',
|
||||
startPos: 0,
|
||||
endPos: 1,
|
||||
endPos: 2,
|
||||
frequencyRank: 8,
|
||||
}),
|
||||
];
|
||||
@@ -314,6 +314,52 @@ test('annotateTokens excludes likely kana SFX tokens from frequency when POS tag
|
||||
assert.equal(result[0]?.frequencyRank, undefined);
|
||||
});
|
||||
|
||||
test('annotateTokens excludes single hiragana and katakana tokens from frequency when POS tags are missing', () => {
|
||||
const tokens = [
|
||||
makeToken({
|
||||
surface: 'た',
|
||||
reading: 'た',
|
||||
headword: 'た',
|
||||
pos1: '',
|
||||
pos2: '',
|
||||
partOfSpeech: PartOfSpeech.other,
|
||||
frequencyRank: 21,
|
||||
startPos: 0,
|
||||
endPos: 1,
|
||||
}),
|
||||
makeToken({
|
||||
surface: 'ア',
|
||||
reading: 'ア',
|
||||
headword: 'ア',
|
||||
pos1: '',
|
||||
pos2: '',
|
||||
partOfSpeech: PartOfSpeech.other,
|
||||
frequencyRank: 22,
|
||||
startPos: 1,
|
||||
endPos: 2,
|
||||
}),
|
||||
makeToken({
|
||||
surface: '山',
|
||||
reading: 'やま',
|
||||
headword: '山',
|
||||
pos1: '',
|
||||
pos2: '',
|
||||
partOfSpeech: PartOfSpeech.other,
|
||||
frequencyRank: 23,
|
||||
startPos: 2,
|
||||
endPos: 3,
|
||||
}),
|
||||
];
|
||||
|
||||
const result = annotateTokens(tokens, makeDeps(), {
|
||||
minSentenceWordsForNPlusOne: 1,
|
||||
});
|
||||
|
||||
assert.equal(result[0]?.frequencyRank, undefined);
|
||||
assert.equal(result[1]?.frequencyRank, undefined);
|
||||
assert.equal(result[2]?.frequencyRank, 23);
|
||||
});
|
||||
|
||||
test('annotateTokens keeps frequency when mecab tags classify token as content-bearing', () => {
|
||||
const tokens = [
|
||||
makeToken({
|
||||
|
||||
@@ -103,6 +103,10 @@ function isFrequencyExcludedByPos(
|
||||
pos1Exclusions: ReadonlySet<string>,
|
||||
pos2Exclusions: ReadonlySet<string>,
|
||||
): boolean {
|
||||
if (isSingleKanaFrequencyNoiseToken(token.surface)) {
|
||||
return true;
|
||||
}
|
||||
|
||||
const normalizedPos1 = normalizePos1Tag(token.pos1);
|
||||
const hasPos1 = normalizedPos1.length > 0;
|
||||
if (isExcludedByTagSet(normalizedPos1, pos1Exclusions)) {
|
||||
@@ -231,6 +235,7 @@ function isKanaChar(char: string): boolean {
|
||||
return (
|
||||
(code >= 0x3041 && code <= 0x3096) ||
|
||||
(code >= 0x309b && code <= 0x309f) ||
|
||||
code === 0x30fc ||
|
||||
(code >= 0x30a0 && code <= 0x30fa) ||
|
||||
(code >= 0x30fd && code <= 0x30ff)
|
||||
);
|
||||
@@ -362,6 +367,20 @@ function isLikelyFrequencyNoiseToken(token: MergedToken): boolean {
|
||||
return false;
|
||||
}
|
||||
|
||||
function isSingleKanaFrequencyNoiseToken(text: string | undefined): boolean {
|
||||
if (typeof text !== 'string') {
|
||||
return false;
|
||||
}
|
||||
|
||||
const normalized = text.trim();
|
||||
if (!normalized) {
|
||||
return false;
|
||||
}
|
||||
|
||||
const chars = [...normalized];
|
||||
return chars.length === 1 && isKanaChar(chars[0]!);
|
||||
}
|
||||
|
||||
function isJlptEligibleToken(token: MergedToken): boolean {
|
||||
if (token.pos1 && shouldIgnoreJlptForMecabPos1(token.pos1)) {
|
||||
return false;
|
||||
|
||||
@@ -127,3 +127,88 @@ test('drops scanning parser tokens which have no dictionary headword', () => {
|
||||
],
|
||||
);
|
||||
});
|
||||
|
||||
test('prefers the longest dictionary headword across merged segments', () => {
|
||||
const parseResults = [
|
||||
makeParseItem('scanning-parser', [
|
||||
[
|
||||
{ text: 'バニ', reading: 'ばに', headword: 'バニ' },
|
||||
{ text: 'ール', reading: 'ーる', headword: 'バニール' },
|
||||
],
|
||||
]),
|
||||
];
|
||||
|
||||
const tokens = selectYomitanParseTokens(parseResults, () => false, 'headword');
|
||||
assert.deepEqual(
|
||||
tokens?.map((token) => ({
|
||||
surface: token.surface,
|
||||
reading: token.reading,
|
||||
headword: token.headword,
|
||||
})),
|
||||
[
|
||||
{
|
||||
surface: 'バニール',
|
||||
reading: 'ばにーる',
|
||||
headword: 'バニール',
|
||||
},
|
||||
],
|
||||
);
|
||||
});
|
||||
|
||||
test('keeps the first headword when later segments are standalone words', () => {
|
||||
const parseResults = [
|
||||
makeParseItem('scanning-parser', [
|
||||
[
|
||||
{ text: '猫', reading: 'ねこ', headword: '猫' },
|
||||
{ text: 'です', reading: 'です', headword: 'です' },
|
||||
],
|
||||
]),
|
||||
];
|
||||
|
||||
const tokens = selectYomitanParseTokens(parseResults, () => false, 'headword');
|
||||
assert.deepEqual(
|
||||
tokens?.map((token) => ({
|
||||
surface: token.surface,
|
||||
reading: token.reading,
|
||||
headword: token.headword,
|
||||
})),
|
||||
[
|
||||
{
|
||||
surface: '猫です',
|
||||
reading: 'ねこです',
|
||||
headword: '猫',
|
||||
},
|
||||
],
|
||||
);
|
||||
});
|
||||
|
||||
test('merges trailing katakana continuation without headword into previous token', () => {
|
||||
const parseResults = [
|
||||
makeParseItem('scanning-parser', [
|
||||
[{ text: 'カズ', reading: 'かず', headword: 'カズマ' }],
|
||||
[{ text: 'マ', reading: 'ま' }],
|
||||
[{ text: '魔王軍', reading: 'まおうぐん', headword: '魔王軍' }],
|
||||
]),
|
||||
];
|
||||
|
||||
const tokens = selectYomitanParseTokens(parseResults, () => false, 'headword');
|
||||
assert.deepEqual(
|
||||
tokens?.map((token) => ({
|
||||
surface: token.surface,
|
||||
reading: token.reading,
|
||||
headword: token.headword,
|
||||
})),
|
||||
[
|
||||
{
|
||||
surface: 'カズマ',
|
||||
reading: 'かずま',
|
||||
headword: 'カズマ',
|
||||
},
|
||||
{
|
||||
surface: '魔王軍',
|
||||
reading: 'まおうぐん',
|
||||
headword: '魔王軍',
|
||||
},
|
||||
],
|
||||
);
|
||||
});
|
||||
|
||||
@@ -49,6 +49,7 @@ function isKanaChar(char: string): boolean {
|
||||
return (
|
||||
(code >= 0x3041 && code <= 0x3096) ||
|
||||
(code >= 0x309b && code <= 0x309f) ||
|
||||
code === 0x30fc ||
|
||||
(code >= 0x30a0 && code <= 0x30fa) ||
|
||||
(code >= 0x30fd && code <= 0x30ff)
|
||||
);
|
||||
@@ -111,6 +112,51 @@ function extractYomitanHeadword(segment: YomitanParseSegment): string {
|
||||
return '';
|
||||
}
|
||||
|
||||
function selectMergedHeadword(
|
||||
firstHeadword: string,
|
||||
expandedHeadwords: string[],
|
||||
surface: string,
|
||||
): string {
|
||||
if (expandedHeadwords.length > 0) {
|
||||
const exactSurfaceMatch = expandedHeadwords.find((headword) => headword === surface);
|
||||
if (exactSurfaceMatch) {
|
||||
return exactSurfaceMatch;
|
||||
}
|
||||
|
||||
return expandedHeadwords.reduce((best, current) => {
|
||||
if (current.length !== best.length) {
|
||||
return current.length > best.length ? current : best;
|
||||
}
|
||||
return best;
|
||||
});
|
||||
}
|
||||
|
||||
if (!firstHeadword) {
|
||||
return '';
|
||||
}
|
||||
return firstHeadword;
|
||||
}
|
||||
|
||||
function isKanaOnlyText(text: string): boolean {
|
||||
return text.length > 0 && Array.from(text).every((char) => isKanaChar(char));
|
||||
}
|
||||
|
||||
function shouldMergeKanaContinuation(
|
||||
previousToken: MergedToken | undefined,
|
||||
continuationSurface: string,
|
||||
): previousToken is MergedToken {
|
||||
if (!previousToken || !continuationSurface || !isKanaOnlyText(continuationSurface)) {
|
||||
return false;
|
||||
}
|
||||
|
||||
if (!previousToken.headword || previousToken.headword.length <= previousToken.surface.length) {
|
||||
return false;
|
||||
}
|
||||
|
||||
const appendedSurface = previousToken.surface + continuationSurface;
|
||||
return previousToken.headword.startsWith(appendedSurface);
|
||||
}
|
||||
|
||||
export function mapYomitanParseResultItemToMergedTokens(
|
||||
parseResult: YomitanParseResultItem,
|
||||
isKnownWord: (text: string) => boolean,
|
||||
@@ -140,7 +186,8 @@ export function mapYomitanParseResultItemToMergedTokens(
|
||||
|
||||
let combinedSurface = '';
|
||||
let combinedReading = '';
|
||||
let combinedHeadword = '';
|
||||
let firstHeadword = '';
|
||||
const expandedHeadwords: string[] = [];
|
||||
|
||||
for (const segment of line) {
|
||||
const segmentText = segment.text;
|
||||
@@ -152,8 +199,14 @@ export function mapYomitanParseResultItemToMergedTokens(
|
||||
if (typeof segment.reading === 'string') {
|
||||
combinedReading += segment.reading;
|
||||
}
|
||||
if (!combinedHeadword) {
|
||||
combinedHeadword = extractYomitanHeadword(segment);
|
||||
const segmentHeadword = extractYomitanHeadword(segment);
|
||||
if (segmentHeadword) {
|
||||
if (!firstHeadword) {
|
||||
firstHeadword = segmentHeadword;
|
||||
}
|
||||
if (segmentHeadword.length > segmentText.length) {
|
||||
expandedHeadwords.push(segmentHeadword);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -164,7 +217,20 @@ export function mapYomitanParseResultItemToMergedTokens(
|
||||
const start = charOffset;
|
||||
const end = start + combinedSurface.length;
|
||||
charOffset = end;
|
||||
const combinedHeadword = selectMergedHeadword(
|
||||
firstHeadword,
|
||||
expandedHeadwords,
|
||||
combinedSurface,
|
||||
);
|
||||
if (!combinedHeadword) {
|
||||
const previousToken = tokens[tokens.length - 1];
|
||||
if (shouldMergeKanaContinuation(previousToken, combinedSurface)) {
|
||||
previousToken.surface += combinedSurface;
|
||||
previousToken.reading += combinedReading;
|
||||
previousToken.endPos = end;
|
||||
continue;
|
||||
}
|
||||
|
||||
// No dictionary-backed headword for this merged unit; skip it entirely so
|
||||
// downstream keyboard/frequency/JLPT flows only operate on lookup-backed tokens.
|
||||
continue;
|
||||
|
||||
@@ -1,12 +1,26 @@
|
||||
import assert from 'node:assert/strict';
|
||||
import * as fs from 'fs';
|
||||
import * as os from 'os';
|
||||
import * as path from 'path';
|
||||
import test from 'node:test';
|
||||
import * as vm from 'node:vm';
|
||||
import {
|
||||
requestYomitanParseResults,
|
||||
getYomitanDictionaryInfo,
|
||||
importYomitanDictionaryFromZip,
|
||||
deleteYomitanDictionaryByTitle,
|
||||
removeYomitanDictionarySettings,
|
||||
requestYomitanScanTokens,
|
||||
requestYomitanTermFrequencies,
|
||||
syncYomitanDefaultAnkiServer,
|
||||
upsertYomitanDictionarySettings,
|
||||
} from './yomitan-parser-runtime';
|
||||
|
||||
function createDeps(executeJavaScript: (script: string) => Promise<unknown>) {
|
||||
function createDeps(
|
||||
executeJavaScript: (script: string) => Promise<unknown>,
|
||||
options?: {
|
||||
createYomitanExtensionWindow?: (pageName: string) => Promise<unknown>;
|
||||
},
|
||||
) {
|
||||
const parserWindow = {
|
||||
isDestroyed: () => false,
|
||||
webContents: {
|
||||
@@ -22,9 +36,44 @@ function createDeps(executeJavaScript: (script: string) => Promise<unknown>) {
|
||||
setYomitanParserReadyPromise: () => undefined,
|
||||
getYomitanParserInitPromise: () => null,
|
||||
setYomitanParserInitPromise: () => undefined,
|
||||
createYomitanExtensionWindow: options?.createYomitanExtensionWindow as never,
|
||||
};
|
||||
}
|
||||
|
||||
async function runInjectedYomitanScript(
|
||||
script: string,
|
||||
handler: (action: string, params: unknown) => unknown,
|
||||
): Promise<unknown> {
|
||||
return await vm.runInNewContext(script, {
|
||||
chrome: {
|
||||
runtime: {
|
||||
lastError: null,
|
||||
sendMessage: (
|
||||
payload: { action?: string; params?: unknown },
|
||||
callback: (response: { result?: unknown; error?: { message?: string } }) => void,
|
||||
) => {
|
||||
try {
|
||||
callback({ result: handler(payload.action ?? '', payload.params) });
|
||||
} catch (error) {
|
||||
callback({ error: { message: (error as Error).message } });
|
||||
}
|
||||
},
|
||||
},
|
||||
},
|
||||
Array,
|
||||
Error,
|
||||
JSON,
|
||||
Map,
|
||||
Math,
|
||||
Number,
|
||||
Object,
|
||||
Promise,
|
||||
RegExp,
|
||||
Set,
|
||||
String,
|
||||
});
|
||||
}
|
||||
|
||||
test('syncYomitanDefaultAnkiServer updates default profile server when script reports update', async () => {
|
||||
let scriptValue = '';
|
||||
const deps = createDeps(async (script) => {
|
||||
@@ -389,7 +438,7 @@ test('requestYomitanTermFrequencies caches repeated term+reading lookups', async
|
||||
assert.equal(frequencyCalls, 1);
|
||||
});
|
||||
|
||||
test('requestYomitanParseResults disables Yomitan MeCab parser path', async () => {
|
||||
test('requestYomitanScanTokens uses left-to-right termsFind scanning instead of parseText', async () => {
|
||||
const scripts: string[] = [];
|
||||
const deps = createDeps(async (script) => {
|
||||
scripts.push(script);
|
||||
@@ -405,15 +454,517 @@ test('requestYomitanParseResults disables Yomitan MeCab parser path', async () =
|
||||
],
|
||||
};
|
||||
}
|
||||
return [];
|
||||
return [
|
||||
{
|
||||
surface: 'カズマ',
|
||||
reading: 'かずま',
|
||||
headword: 'カズマ',
|
||||
startPos: 0,
|
||||
endPos: 3,
|
||||
},
|
||||
];
|
||||
});
|
||||
|
||||
const result = await requestYomitanParseResults('猫です', deps, {
|
||||
const result = await requestYomitanScanTokens('カズマ', deps, {
|
||||
error: () => undefined,
|
||||
});
|
||||
|
||||
assert.deepEqual(result, []);
|
||||
const parseScript = scripts.find((script) => script.includes('parseText'));
|
||||
assert.ok(parseScript, 'expected parseText request script');
|
||||
assert.match(parseScript ?? '', /useMecabParser:\s*false/);
|
||||
assert.deepEqual(result, [
|
||||
{
|
||||
surface: 'カズマ',
|
||||
reading: 'かずま',
|
||||
headword: 'カズマ',
|
||||
startPos: 0,
|
||||
endPos: 3,
|
||||
},
|
||||
]);
|
||||
const scannerScript = scripts.find((script) => script.includes('termsFind'));
|
||||
assert.ok(scannerScript, 'expected termsFind scanning request script');
|
||||
assert.doesNotMatch(scannerScript ?? '', /parseText/);
|
||||
assert.match(scannerScript ?? '', /matchType:\s*"exact"/);
|
||||
assert.match(scannerScript ?? '', /deinflect:\s*true/);
|
||||
});
|
||||
|
||||
test('requestYomitanScanTokens marks tokens backed by SubMiner character dictionary entries', async () => {
|
||||
const deps = createDeps(async (script) => {
|
||||
if (script.includes('optionsGetFull')) {
|
||||
return {
|
||||
profileCurrent: 0,
|
||||
profiles: [
|
||||
{
|
||||
options: {
|
||||
scanning: { length: 40 },
|
||||
},
|
||||
},
|
||||
],
|
||||
};
|
||||
}
|
||||
|
||||
return [
|
||||
{
|
||||
surface: 'アクア',
|
||||
reading: 'あくあ',
|
||||
headword: 'アクア',
|
||||
startPos: 0,
|
||||
endPos: 3,
|
||||
isNameMatch: true,
|
||||
},
|
||||
{
|
||||
surface: 'です',
|
||||
reading: 'です',
|
||||
headword: 'です',
|
||||
startPos: 3,
|
||||
endPos: 5,
|
||||
isNameMatch: false,
|
||||
},
|
||||
];
|
||||
});
|
||||
|
||||
const result = await requestYomitanScanTokens('アクアです', deps, {
|
||||
error: () => undefined,
|
||||
});
|
||||
|
||||
assert.equal(result?.length, 2);
|
||||
assert.equal((result?.[0] as { isNameMatch?: boolean } | undefined)?.isNameMatch, true);
|
||||
assert.equal((result?.[1] as { isNameMatch?: boolean } | undefined)?.isNameMatch, false);
|
||||
});
|
||||
|
||||
test('requestYomitanScanTokens skips name-match work when disabled', async () => {
|
||||
let scannerScript = '';
|
||||
const deps = createDeps(async (script) => {
|
||||
if (script.includes('termsFind')) {
|
||||
scannerScript = script;
|
||||
}
|
||||
if (script.includes('optionsGetFull')) {
|
||||
return {
|
||||
profileCurrent: 0,
|
||||
profiles: [
|
||||
{
|
||||
options: {
|
||||
scanning: { length: 40 },
|
||||
},
|
||||
},
|
||||
],
|
||||
};
|
||||
}
|
||||
|
||||
return [
|
||||
{
|
||||
surface: 'アクア',
|
||||
reading: 'あくあ',
|
||||
headword: 'アクア',
|
||||
startPos: 0,
|
||||
endPos: 3,
|
||||
},
|
||||
];
|
||||
});
|
||||
|
||||
const result = await requestYomitanScanTokens(
|
||||
'アクア',
|
||||
deps,
|
||||
{ error: () => undefined },
|
||||
{ includeNameMatchMetadata: false },
|
||||
);
|
||||
|
||||
assert.equal(result?.length, 1);
|
||||
assert.equal((result?.[0] as { isNameMatch?: boolean } | undefined)?.isNameMatch, undefined);
|
||||
assert.match(scannerScript, /const includeNameMatchMetadata = false;/);
|
||||
});
|
||||
|
||||
test('requestYomitanScanTokens marks grouped entries when SubMiner dictionary alias only exists on definitions', async () => {
|
||||
let scannerScript = '';
|
||||
const deps = createDeps(async (script) => {
|
||||
if (script.includes('termsFind')) {
|
||||
scannerScript = script;
|
||||
return [];
|
||||
}
|
||||
if (script.includes('optionsGetFull')) {
|
||||
return {
|
||||
profileCurrent: 0,
|
||||
profiles: [
|
||||
{
|
||||
options: {
|
||||
scanning: { length: 40 },
|
||||
},
|
||||
},
|
||||
],
|
||||
};
|
||||
}
|
||||
return null;
|
||||
});
|
||||
|
||||
await requestYomitanScanTokens(
|
||||
'カズマ',
|
||||
deps,
|
||||
{ error: () => undefined },
|
||||
{ includeNameMatchMetadata: true },
|
||||
);
|
||||
|
||||
assert.match(scannerScript, /getPreferredHeadword/);
|
||||
|
||||
const result = await runInjectedYomitanScript(scannerScript, (action, params) => {
|
||||
if (action === 'termsFind') {
|
||||
const text = (params as { text?: string } | undefined)?.text;
|
||||
if (text === 'カズマ') {
|
||||
return {
|
||||
originalTextLength: 3,
|
||||
dictionaryEntries: [
|
||||
{
|
||||
dictionaryAlias: '',
|
||||
headwords: [
|
||||
{
|
||||
term: 'カズマ',
|
||||
reading: 'かずま',
|
||||
sources: [{ originalText: 'カズマ', isPrimary: true, matchType: 'exact' }],
|
||||
},
|
||||
],
|
||||
definitions: [
|
||||
{ dictionary: 'JMdict', dictionaryAlias: 'JMdict' },
|
||||
{
|
||||
dictionary: 'SubMiner Character Dictionary (AniList 130298)',
|
||||
dictionaryAlias: 'SubMiner Character Dictionary (AniList 130298)',
|
||||
},
|
||||
],
|
||||
},
|
||||
],
|
||||
};
|
||||
}
|
||||
return { originalTextLength: 0, dictionaryEntries: [] };
|
||||
}
|
||||
throw new Error(`unexpected action: ${action}`);
|
||||
});
|
||||
|
||||
assert.equal(Array.isArray(result), true);
|
||||
assert.equal((result as { length?: number } | null)?.length, 1);
|
||||
assert.equal((result as Array<{ surface?: string }>)[0]?.surface, 'カズマ');
|
||||
assert.equal((result as Array<{ headword?: string }>)[0]?.headword, 'カズマ');
|
||||
assert.equal((result as Array<{ startPos?: number }>)[0]?.startPos, 0);
|
||||
assert.equal((result as Array<{ endPos?: number }>)[0]?.endPos, 3);
|
||||
assert.equal((result as Array<{ isNameMatch?: boolean }>)[0]?.isNameMatch, true);
|
||||
});
|
||||
|
||||
test('requestYomitanScanTokens skips fallback fragments without exact primary source matches', async () => {
|
||||
const deps = createDeps(async (script) => {
|
||||
if (script.includes('optionsGetFull')) {
|
||||
return {
|
||||
profileCurrent: 0,
|
||||
profiles: [
|
||||
{
|
||||
options: {
|
||||
scanning: { length: 40 },
|
||||
},
|
||||
},
|
||||
],
|
||||
};
|
||||
}
|
||||
|
||||
return await runInjectedYomitanScript(script, (action, params) => {
|
||||
if (action !== 'termsFind') {
|
||||
throw new Error(`unexpected action: ${action}`);
|
||||
}
|
||||
|
||||
const text = (params as { text?: string } | undefined)?.text ?? '';
|
||||
if (text.startsWith('だが ')) {
|
||||
return {
|
||||
originalTextLength: 2,
|
||||
dictionaryEntries: [
|
||||
{
|
||||
headwords: [
|
||||
{
|
||||
term: 'だが',
|
||||
reading: 'だが',
|
||||
sources: [{ originalText: 'だが', isPrimary: true, matchType: 'exact' }],
|
||||
},
|
||||
],
|
||||
},
|
||||
],
|
||||
};
|
||||
}
|
||||
if (text.startsWith('それでも')) {
|
||||
return {
|
||||
originalTextLength: 4,
|
||||
dictionaryEntries: [
|
||||
{
|
||||
headwords: [
|
||||
{
|
||||
term: 'それでも',
|
||||
reading: 'それでも',
|
||||
sources: [{ originalText: 'それでも', isPrimary: true, matchType: 'exact' }],
|
||||
},
|
||||
],
|
||||
},
|
||||
],
|
||||
};
|
||||
}
|
||||
if (text.startsWith('届かぬ')) {
|
||||
return {
|
||||
originalTextLength: 3,
|
||||
dictionaryEntries: [
|
||||
{
|
||||
headwords: [
|
||||
{
|
||||
term: '届く',
|
||||
reading: 'とどく',
|
||||
sources: [{ originalText: '届かぬ', isPrimary: true, matchType: 'exact' }],
|
||||
},
|
||||
],
|
||||
},
|
||||
],
|
||||
};
|
||||
}
|
||||
if (text.startsWith('高み')) {
|
||||
return {
|
||||
originalTextLength: 2,
|
||||
dictionaryEntries: [
|
||||
{
|
||||
headwords: [
|
||||
{
|
||||
term: '高み',
|
||||
reading: 'たかみ',
|
||||
sources: [{ originalText: '高み', isPrimary: true, matchType: 'exact' }],
|
||||
},
|
||||
],
|
||||
},
|
||||
],
|
||||
};
|
||||
}
|
||||
if (text.startsWith('があった')) {
|
||||
return {
|
||||
originalTextLength: 2,
|
||||
dictionaryEntries: [
|
||||
{
|
||||
headwords: [
|
||||
{
|
||||
term: 'があ',
|
||||
reading: '',
|
||||
sources: [{ originalText: 'が', isPrimary: true, matchType: 'exact' }],
|
||||
},
|
||||
],
|
||||
},
|
||||
],
|
||||
};
|
||||
}
|
||||
if (text.startsWith('あった')) {
|
||||
return {
|
||||
originalTextLength: 3,
|
||||
dictionaryEntries: [
|
||||
{
|
||||
headwords: [
|
||||
{
|
||||
term: 'ある',
|
||||
reading: 'ある',
|
||||
sources: [{ originalText: 'あった', isPrimary: true, matchType: 'exact' }],
|
||||
},
|
||||
],
|
||||
},
|
||||
],
|
||||
};
|
||||
}
|
||||
return { originalTextLength: 0, dictionaryEntries: [] };
|
||||
});
|
||||
});
|
||||
|
||||
const result = await requestYomitanScanTokens('だが それでも届かぬ高みがあった', deps, {
|
||||
error: () => undefined,
|
||||
});
|
||||
|
||||
assert.deepEqual(
|
||||
result?.map((token) => ({
|
||||
surface: token.surface,
|
||||
headword: token.headword,
|
||||
startPos: token.startPos,
|
||||
endPos: token.endPos,
|
||||
})),
|
||||
[
|
||||
{
|
||||
surface: 'だが',
|
||||
headword: 'だが',
|
||||
startPos: 0,
|
||||
endPos: 2,
|
||||
},
|
||||
{
|
||||
surface: 'それでも',
|
||||
headword: 'それでも',
|
||||
startPos: 3,
|
||||
endPos: 7,
|
||||
},
|
||||
{
|
||||
surface: '届かぬ',
|
||||
headword: '届く',
|
||||
startPos: 7,
|
||||
endPos: 10,
|
||||
},
|
||||
{
|
||||
surface: '高み',
|
||||
headword: '高み',
|
||||
startPos: 10,
|
||||
endPos: 12,
|
||||
},
|
||||
{
|
||||
surface: 'あった',
|
||||
headword: 'ある',
|
||||
startPos: 13,
|
||||
endPos: 16,
|
||||
},
|
||||
],
|
||||
);
|
||||
});
|
||||
|
||||
test('getYomitanDictionaryInfo requests dictionary info via backend action', async () => {
|
||||
let scriptValue = '';
|
||||
const deps = createDeps(async (script) => {
|
||||
scriptValue = script;
|
||||
return [{ title: 'SubMiner Character Dictionary (AniList 130298)', revision: '1' }];
|
||||
});
|
||||
|
||||
const dictionaries = await getYomitanDictionaryInfo(deps, { error: () => undefined });
|
||||
assert.equal(dictionaries.length, 1);
|
||||
assert.equal(dictionaries[0]?.title, 'SubMiner Character Dictionary (AniList 130298)');
|
||||
assert.match(scriptValue, /getDictionaryInfo/);
|
||||
});
|
||||
|
||||
test('dictionary settings helpers upsert and remove dictionary entries without reordering', async () => {
|
||||
const scripts: string[] = [];
|
||||
const optionsFull = {
|
||||
profileCurrent: 0,
|
||||
profiles: [
|
||||
{
|
||||
options: {
|
||||
dictionaries: [
|
||||
{
|
||||
name: 'Jitendex',
|
||||
alias: 'Jitendex',
|
||||
enabled: true,
|
||||
},
|
||||
{
|
||||
name: 'SubMiner Character Dictionary (AniList 1)',
|
||||
alias: 'SubMiner Character Dictionary (AniList 1)',
|
||||
enabled: false,
|
||||
},
|
||||
],
|
||||
},
|
||||
},
|
||||
],
|
||||
};
|
||||
|
||||
const deps = createDeps(async (script) => {
|
||||
scripts.push(script);
|
||||
if (script.includes('optionsGetFull')) {
|
||||
return JSON.parse(JSON.stringify(optionsFull));
|
||||
}
|
||||
if (script.includes('setAllSettings')) {
|
||||
return true;
|
||||
}
|
||||
return null;
|
||||
});
|
||||
|
||||
const title = 'SubMiner Character Dictionary (AniList 1)';
|
||||
const upserted = await upsertYomitanDictionarySettings(title, 'all', deps, {
|
||||
error: () => undefined,
|
||||
});
|
||||
const removed = await removeYomitanDictionarySettings(title, 'all', 'delete', deps, {
|
||||
error: () => undefined,
|
||||
});
|
||||
|
||||
assert.equal(upserted, true);
|
||||
assert.equal(removed, true);
|
||||
const setCalls = scripts.filter((script) => script.includes('setAllSettings')).length;
|
||||
assert.equal(setCalls, 2);
|
||||
|
||||
const upsertScript = scripts.find(
|
||||
(script) =>
|
||||
script.includes('setAllSettings') &&
|
||||
script.includes('"SubMiner Character Dictionary (AniList 1)"'),
|
||||
);
|
||||
assert.ok(upsertScript);
|
||||
const jitendexOffset = upsertScript?.indexOf('"Jitendex"') ?? -1;
|
||||
const subMinerOffset = upsertScript?.indexOf('"SubMiner Character Dictionary (AniList 1)"') ?? -1;
|
||||
assert.equal(jitendexOffset >= 0, true);
|
||||
assert.equal(subMinerOffset >= 0, true);
|
||||
assert.equal(jitendexOffset < subMinerOffset, true);
|
||||
assert.match(upsertScript ?? '', /"enabled":true/);
|
||||
});
|
||||
|
||||
test('importYomitanDictionaryFromZip uses settings automation bridge instead of custom backend action', async () => {
|
||||
const tempDir = fs.mkdtempSync(path.join(os.tmpdir(), 'subminer-yomitan-import-'));
|
||||
const zipPath = path.join(tempDir, 'dict.zip');
|
||||
fs.writeFileSync(zipPath, Buffer.from('zip-bytes'));
|
||||
|
||||
const scripts: string[] = [];
|
||||
const settingsWindow = {
|
||||
isDestroyed: () => false,
|
||||
destroy: () => undefined,
|
||||
webContents: {
|
||||
executeJavaScript: async (script: string) => {
|
||||
scripts.push(script);
|
||||
return true;
|
||||
},
|
||||
},
|
||||
};
|
||||
|
||||
const deps = createDeps(async () => true, {
|
||||
createYomitanExtensionWindow: async (pageName: string) => {
|
||||
assert.equal(pageName, 'settings.html');
|
||||
return settingsWindow;
|
||||
},
|
||||
});
|
||||
|
||||
const imported = await importYomitanDictionaryFromZip(zipPath, deps, {
|
||||
error: () => undefined,
|
||||
});
|
||||
|
||||
assert.equal(imported, true);
|
||||
assert.equal(
|
||||
scripts.some((script) => script.includes('__subminerYomitanSettingsAutomation')),
|
||||
true,
|
||||
);
|
||||
assert.equal(
|
||||
scripts.some((script) => script.includes('importDictionaryArchiveBase64')),
|
||||
true,
|
||||
);
|
||||
assert.equal(
|
||||
scripts.some((script) => script.includes('subminerImportDictionary')),
|
||||
false,
|
||||
);
|
||||
});
|
||||
|
||||
test('deleteYomitanDictionaryByTitle uses settings automation bridge instead of custom backend action', async () => {
|
||||
const scripts: string[] = [];
|
||||
const settingsWindow = {
|
||||
isDestroyed: () => false,
|
||||
destroy: () => undefined,
|
||||
webContents: {
|
||||
executeJavaScript: async (script: string) => {
|
||||
scripts.push(script);
|
||||
return true;
|
||||
},
|
||||
},
|
||||
};
|
||||
|
||||
const deps = createDeps(async () => true, {
|
||||
createYomitanExtensionWindow: async (pageName: string) => {
|
||||
assert.equal(pageName, 'settings.html');
|
||||
return settingsWindow;
|
||||
},
|
||||
});
|
||||
|
||||
const deleted = await deleteYomitanDictionaryByTitle(
|
||||
'SubMiner Character Dictionary (AniList 130298)',
|
||||
deps,
|
||||
{ error: () => undefined },
|
||||
);
|
||||
|
||||
assert.equal(deleted, true);
|
||||
assert.equal(
|
||||
scripts.some((script) => script.includes('__subminerYomitanSettingsAutomation')),
|
||||
true,
|
||||
);
|
||||
assert.equal(
|
||||
scripts.some((script) => script.includes('deleteDictionary')),
|
||||
true,
|
||||
);
|
||||
assert.equal(
|
||||
scripts.some((script) => script.includes('subminerDeleteDictionary')),
|
||||
false,
|
||||
);
|
||||
});
|
||||
|
||||
@@ -1,4 +1,7 @@
|
||||
import type { BrowserWindow, Extension } from 'electron';
|
||||
import * as fs from 'fs';
|
||||
import * as path from 'path';
|
||||
import { selectYomitanParseTokens } from './parser-selection-stage';
|
||||
|
||||
interface LoggerLike {
|
||||
error: (message: string, ...args: unknown[]) => void;
|
||||
@@ -13,6 +16,12 @@ interface YomitanParserRuntimeDeps {
|
||||
setYomitanParserReadyPromise: (promise: Promise<void> | null) => void;
|
||||
getYomitanParserInitPromise: () => Promise<boolean> | null;
|
||||
setYomitanParserInitPromise: (promise: Promise<boolean> | null) => void;
|
||||
createYomitanExtensionWindow?: (pageName: string) => Promise<BrowserWindow | null>;
|
||||
}
|
||||
|
||||
export interface YomitanDictionaryInfo {
|
||||
title: string;
|
||||
revision?: string | number;
|
||||
}
|
||||
|
||||
export interface YomitanTermFrequency {
|
||||
@@ -30,6 +39,15 @@ export interface YomitanTermReadingPair {
|
||||
reading: string | null;
|
||||
}
|
||||
|
||||
export interface YomitanScanToken {
|
||||
surface: string;
|
||||
reading: string;
|
||||
headword: string;
|
||||
startPos: number;
|
||||
endPos: number;
|
||||
isNameMatch?: boolean;
|
||||
}
|
||||
|
||||
interface YomitanProfileMetadata {
|
||||
profileIndex: number;
|
||||
scanLength: number;
|
||||
@@ -48,6 +66,22 @@ function isObject(value: unknown): value is Record<string, unknown> {
|
||||
return Boolean(value && typeof value === 'object');
|
||||
}
|
||||
|
||||
function isScanTokenArray(value: unknown): value is YomitanScanToken[] {
|
||||
return (
|
||||
Array.isArray(value) &&
|
||||
value.every(
|
||||
(entry) =>
|
||||
isObject(entry) &&
|
||||
typeof entry.surface === 'string' &&
|
||||
typeof entry.reading === 'string' &&
|
||||
typeof entry.headword === 'string' &&
|
||||
typeof entry.startPos === 'number' &&
|
||||
typeof entry.endPos === 'number' &&
|
||||
(entry.isNameMatch === undefined || typeof entry.isNameMatch === 'boolean'),
|
||||
)
|
||||
);
|
||||
}
|
||||
|
||||
function makeTermReadingCacheKey(term: string, reading: string | null): string {
|
||||
return `${term}\u0000${reading ?? ''}`;
|
||||
}
|
||||
@@ -489,6 +523,392 @@ async function ensureYomitanParserWindow(
|
||||
return initPromise;
|
||||
}
|
||||
|
||||
async function createYomitanExtensionWindow(
|
||||
pageName: string,
|
||||
deps: YomitanParserRuntimeDeps,
|
||||
logger: LoggerLike,
|
||||
): Promise<BrowserWindow | null> {
|
||||
if (typeof deps.createYomitanExtensionWindow === 'function') {
|
||||
return await deps.createYomitanExtensionWindow(pageName);
|
||||
}
|
||||
|
||||
const electron = await import('electron');
|
||||
const yomitanExt = deps.getYomitanExt();
|
||||
if (!yomitanExt) {
|
||||
return null;
|
||||
}
|
||||
|
||||
const { BrowserWindow, session } = electron;
|
||||
const window = new BrowserWindow({
|
||||
show: false,
|
||||
width: 1200,
|
||||
height: 800,
|
||||
webPreferences: {
|
||||
contextIsolation: true,
|
||||
nodeIntegration: false,
|
||||
session: session.defaultSession,
|
||||
},
|
||||
});
|
||||
|
||||
try {
|
||||
await new Promise<void>((resolve, reject) => {
|
||||
window.webContents.once('did-finish-load', () => resolve());
|
||||
window.webContents.once('did-fail-load', (_event, _errorCode, errorDescription) => {
|
||||
reject(new Error(errorDescription));
|
||||
});
|
||||
void window
|
||||
.loadURL(`chrome-extension://${yomitanExt.id}/${pageName}`)
|
||||
.catch((error: Error) => reject(error));
|
||||
});
|
||||
return window;
|
||||
} catch (err) {
|
||||
logger.error(`Failed to create hidden Yomitan ${pageName} window: ${(err as Error).message}`);
|
||||
if (!window.isDestroyed()) {
|
||||
window.destroy();
|
||||
}
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
||||
async function invokeYomitanSettingsAutomation<T>(
|
||||
script: string,
|
||||
deps: YomitanParserRuntimeDeps,
|
||||
logger: LoggerLike,
|
||||
): Promise<T | null> {
|
||||
const settingsWindow = await createYomitanExtensionWindow('settings.html', deps, logger);
|
||||
if (!settingsWindow || settingsWindow.isDestroyed()) {
|
||||
return null;
|
||||
}
|
||||
|
||||
try {
|
||||
await settingsWindow.webContents.executeJavaScript(
|
||||
`
|
||||
(async () => {
|
||||
const deadline = Date.now() + 10000;
|
||||
while (Date.now() < deadline) {
|
||||
if (globalThis.__subminerYomitanSettingsAutomation?.ready === true) {
|
||||
return true;
|
||||
}
|
||||
await new Promise((resolve) => setTimeout(resolve, 50));
|
||||
}
|
||||
throw new Error("Yomitan settings automation bridge did not become ready");
|
||||
})();
|
||||
`,
|
||||
true,
|
||||
);
|
||||
|
||||
return (await settingsWindow.webContents.executeJavaScript(script, true)) as T;
|
||||
} catch (err) {
|
||||
logger.error('Failed to drive Yomitan settings automation:', (err as Error).message);
|
||||
return null;
|
||||
} finally {
|
||||
if (!settingsWindow.isDestroyed()) {
|
||||
settingsWindow.destroy();
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
const YOMITAN_SCANNING_HELPERS = String.raw`
|
||||
const HIRAGANA_CONVERSION_RANGE = [0x3041, 0x3096];
|
||||
const KATAKANA_CONVERSION_RANGE = [0x30a1, 0x30f6];
|
||||
const KANA_PROLONGED_SOUND_MARK_CODE_POINT = 0x30fc;
|
||||
const KATAKANA_SMALL_KA_CODE_POINT = 0x30f5;
|
||||
const KATAKANA_SMALL_KE_CODE_POINT = 0x30f6;
|
||||
const KANA_RANGES = [[0x3040, 0x309f], [0x30a0, 0x30ff]];
|
||||
const JAPANESE_RANGES = [[0x3040, 0x30ff], [0x3400, 0x9fff]];
|
||||
function isCodePointInRange(codePoint, range) { return codePoint >= range[0] && codePoint <= range[1]; }
|
||||
function isCodePointInRanges(codePoint, ranges) { return ranges.some((range) => isCodePointInRange(codePoint, range)); }
|
||||
function isCodePointKana(codePoint) { return isCodePointInRanges(codePoint, KANA_RANGES); }
|
||||
function isCodePointJapanese(codePoint) { return isCodePointInRanges(codePoint, JAPANESE_RANGES); }
|
||||
function createFuriganaSegment(text, reading) { return {text, reading}; }
|
||||
function getProlongedHiragana(previousCharacter) {
|
||||
switch (previousCharacter) {
|
||||
case "あ": case "か": case "が": case "さ": case "ざ": case "た": case "だ": case "な": case "は": case "ば": case "ぱ": case "ま": case "や": case "ら": case "わ": case "ぁ": case "ゃ": case "ゎ": return "あ";
|
||||
case "い": case "き": case "ぎ": case "し": case "じ": case "ち": case "ぢ": case "に": case "ひ": case "び": case "ぴ": case "み": case "り": case "ぃ": return "い";
|
||||
case "う": case "く": case "ぐ": case "す": case "ず": case "つ": case "づ": case "ぬ": case "ふ": case "ぶ": case "ぷ": case "む": case "ゆ": case "る": case "ぅ": case "ゅ": return "う";
|
||||
case "え": case "け": case "げ": case "せ": case "ぜ": case "て": case "で": case "ね": case "へ": case "べ": case "ぺ": case "め": case "れ": case "ぇ": return "え";
|
||||
case "お": case "こ": case "ご": case "そ": case "ぞ": case "と": case "ど": case "の": case "ほ": case "ぼ": case "ぽ": case "も": case "よ": case "ろ": case "を": case "ぉ": case "ょ": return "う";
|
||||
default: return null;
|
||||
}
|
||||
}
|
||||
function getFuriganaKanaSegments(text, reading) {
|
||||
const newSegments = [];
|
||||
let start = 0;
|
||||
let state = (reading[0] === text[0]);
|
||||
for (let i = 1; i < text.length; ++i) {
|
||||
const newState = (reading[i] === text[i]);
|
||||
if (state === newState) { continue; }
|
||||
newSegments.push(createFuriganaSegment(text.substring(start, i), state ? '' : reading.substring(start, i)));
|
||||
state = newState;
|
||||
start = i;
|
||||
}
|
||||
newSegments.push(createFuriganaSegment(text.substring(start), state ? '' : reading.substring(start)));
|
||||
return newSegments;
|
||||
}
|
||||
function convertKatakanaToHiragana(text, keepProlongedSoundMarks = false) {
|
||||
let result = '';
|
||||
const offset = (HIRAGANA_CONVERSION_RANGE[0] - KATAKANA_CONVERSION_RANGE[0]);
|
||||
for (let char of text) {
|
||||
const codePoint = char.codePointAt(0);
|
||||
switch (codePoint) {
|
||||
case KATAKANA_SMALL_KA_CODE_POINT:
|
||||
case KATAKANA_SMALL_KE_CODE_POINT:
|
||||
break;
|
||||
case KANA_PROLONGED_SOUND_MARK_CODE_POINT:
|
||||
if (!keepProlongedSoundMarks && result.length > 0) {
|
||||
const char2 = getProlongedHiragana(result[result.length - 1]);
|
||||
if (char2 !== null) { char = char2; }
|
||||
}
|
||||
break;
|
||||
default:
|
||||
if (isCodePointInRange(codePoint, KATAKANA_CONVERSION_RANGE)) {
|
||||
char = String.fromCodePoint(codePoint + offset);
|
||||
}
|
||||
break;
|
||||
}
|
||||
result += char;
|
||||
}
|
||||
return result;
|
||||
}
|
||||
function segmentizeFurigana(reading, readingNormalized, groups, groupsStart) {
|
||||
const groupCount = groups.length - groupsStart;
|
||||
if (groupCount <= 0) { return reading.length === 0 ? [] : null; }
|
||||
const group = groups[groupsStart];
|
||||
const {isKana, text} = group;
|
||||
if (isKana) {
|
||||
if (group.textNormalized !== null && readingNormalized.startsWith(group.textNormalized)) {
|
||||
const segments = segmentizeFurigana(reading.substring(text.length), readingNormalized.substring(text.length), groups, groupsStart + 1);
|
||||
if (segments !== null) {
|
||||
if (reading.startsWith(text)) { segments.unshift(createFuriganaSegment(text, '')); }
|
||||
else { segments.unshift(...getFuriganaKanaSegments(text, reading)); }
|
||||
return segments;
|
||||
}
|
||||
}
|
||||
return null;
|
||||
}
|
||||
let result = null;
|
||||
for (let i = reading.length; i >= text.length; --i) {
|
||||
const segments = segmentizeFurigana(reading.substring(i), readingNormalized.substring(i), groups, groupsStart + 1);
|
||||
if (segments !== null) {
|
||||
if (result !== null) { return null; }
|
||||
segments.unshift(createFuriganaSegment(text, reading.substring(0, i)));
|
||||
result = segments;
|
||||
}
|
||||
if (groupCount === 1) { break; }
|
||||
}
|
||||
return result;
|
||||
}
|
||||
function distributeFurigana(term, reading) {
|
||||
if (reading === term) { return [createFuriganaSegment(term, '')]; }
|
||||
const groups = [];
|
||||
let groupPre = null;
|
||||
let isKanaPre = null;
|
||||
for (const c of term) {
|
||||
const isKana = isCodePointKana(c.codePointAt(0));
|
||||
if (isKana === isKanaPre) { groupPre.text += c; }
|
||||
else {
|
||||
groupPre = {isKana, text: c, textNormalized: null};
|
||||
groups.push(groupPre);
|
||||
isKanaPre = isKana;
|
||||
}
|
||||
}
|
||||
for (const group of groups) {
|
||||
if (group.isKana) { group.textNormalized = convertKatakanaToHiragana(group.text); }
|
||||
}
|
||||
const segments = segmentizeFurigana(reading, convertKatakanaToHiragana(reading), groups, 0);
|
||||
return segments !== null ? segments : [createFuriganaSegment(term, reading)];
|
||||
}
|
||||
function getStemLength(text1, text2) {
|
||||
const minLength = Math.min(text1.length, text2.length);
|
||||
if (minLength === 0) { return 0; }
|
||||
let i = 0;
|
||||
while (true) {
|
||||
const char1 = text1.codePointAt(i);
|
||||
const char2 = text2.codePointAt(i);
|
||||
if (char1 !== char2) { break; }
|
||||
const charLength = String.fromCodePoint(char1).length;
|
||||
i += charLength;
|
||||
if (i >= minLength) {
|
||||
if (i > minLength) { i -= charLength; }
|
||||
break;
|
||||
}
|
||||
}
|
||||
return i;
|
||||
}
|
||||
function distributeFuriganaInflected(term, reading, source) {
|
||||
const termNormalized = convertKatakanaToHiragana(term);
|
||||
const readingNormalized = convertKatakanaToHiragana(reading);
|
||||
const sourceNormalized = convertKatakanaToHiragana(source);
|
||||
let mainText = term;
|
||||
let stemLength = getStemLength(termNormalized, sourceNormalized);
|
||||
const readingStemLength = getStemLength(readingNormalized, sourceNormalized);
|
||||
if (readingStemLength > 0 && readingStemLength >= stemLength) {
|
||||
mainText = reading;
|
||||
stemLength = readingStemLength;
|
||||
reading = source.substring(0, stemLength) + reading.substring(stemLength);
|
||||
}
|
||||
const segments = [];
|
||||
if (stemLength > 0) {
|
||||
mainText = source.substring(0, stemLength) + mainText.substring(stemLength);
|
||||
const segments2 = distributeFurigana(mainText, reading);
|
||||
let consumed = 0;
|
||||
for (const segment of segments2) {
|
||||
const start = consumed;
|
||||
consumed += segment.text.length;
|
||||
if (consumed < stemLength) { segments.push(segment); }
|
||||
else if (consumed === stemLength) { segments.push(segment); break; }
|
||||
else {
|
||||
if (start < stemLength) { segments.push(createFuriganaSegment(mainText.substring(start, stemLength), '')); }
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
if (stemLength < source.length) {
|
||||
const remainder = source.substring(stemLength);
|
||||
const last = segments[segments.length - 1];
|
||||
if (last && last.reading.length === 0) { last.text += remainder; }
|
||||
else { segments.push(createFuriganaSegment(remainder, '')); }
|
||||
}
|
||||
return segments;
|
||||
}
|
||||
function getPreferredHeadword(dictionaryEntries, token) {
|
||||
function appendDictionaryNames(target, value) {
|
||||
if (!value || typeof value !== 'object') {
|
||||
return;
|
||||
}
|
||||
const candidates = [
|
||||
value.dictionary,
|
||||
value.dictionaryName,
|
||||
value.name,
|
||||
value.title,
|
||||
value.dictionaryTitle,
|
||||
value.dictionaryAlias
|
||||
];
|
||||
for (const candidate of candidates) {
|
||||
if (typeof candidate === 'string' && candidate.trim().length > 0) {
|
||||
target.push(candidate.trim());
|
||||
}
|
||||
}
|
||||
}
|
||||
function getDictionaryEntryNames(entry) {
|
||||
const names = [];
|
||||
appendDictionaryNames(names, entry);
|
||||
for (const definition of entry?.definitions || []) {
|
||||
appendDictionaryNames(names, definition);
|
||||
}
|
||||
for (const frequency of entry?.frequencies || []) {
|
||||
appendDictionaryNames(names, frequency);
|
||||
}
|
||||
for (const pronunciation of entry?.pronunciations || []) {
|
||||
appendDictionaryNames(names, pronunciation);
|
||||
}
|
||||
return names;
|
||||
}
|
||||
function isNameDictionaryEntry(entry) {
|
||||
if (!includeNameMatchMetadata || !entry || typeof entry !== 'object') {
|
||||
return false;
|
||||
}
|
||||
return getDictionaryEntryNames(entry).some((name) => name.startsWith("SubMiner Character Dictionary"));
|
||||
}
|
||||
function hasExactPrimarySource(headword, token) {
|
||||
for (const src of headword.sources || []) {
|
||||
if (src.originalText !== token) { continue; }
|
||||
if (!src.isPrimary) { continue; }
|
||||
if (src.matchType !== 'exact') { continue; }
|
||||
return true;
|
||||
}
|
||||
return false;
|
||||
}
|
||||
let matchedNameDictionary = false;
|
||||
if (includeNameMatchMetadata) {
|
||||
for (const dictionaryEntry of dictionaryEntries || []) {
|
||||
if (!isNameDictionaryEntry(dictionaryEntry)) { continue; }
|
||||
for (const headword of dictionaryEntry.headwords || []) {
|
||||
if (!hasExactPrimarySource(headword, token)) { continue; }
|
||||
matchedNameDictionary = true;
|
||||
break;
|
||||
}
|
||||
if (matchedNameDictionary) { break; }
|
||||
}
|
||||
}
|
||||
for (const dictionaryEntry of dictionaryEntries || []) {
|
||||
for (const headword of dictionaryEntry.headwords || []) {
|
||||
if (!hasExactPrimarySource(headword, token)) { continue; }
|
||||
return {
|
||||
term: headword.term,
|
||||
reading: headword.reading,
|
||||
isNameMatch: matchedNameDictionary || isNameDictionaryEntry(dictionaryEntry)
|
||||
};
|
||||
}
|
||||
}
|
||||
return null;
|
||||
}
|
||||
`;
|
||||
|
||||
function buildYomitanScanningScript(
|
||||
text: string,
|
||||
profileIndex: number,
|
||||
scanLength: number,
|
||||
includeNameMatchMetadata: boolean,
|
||||
): string {
|
||||
return `
|
||||
(async () => {
|
||||
const invoke = (action, params) =>
|
||||
new Promise((resolve, reject) => {
|
||||
chrome.runtime.sendMessage({ action, params }, (response) => {
|
||||
if (chrome.runtime.lastError) {
|
||||
reject(new Error(chrome.runtime.lastError.message));
|
||||
return;
|
||||
}
|
||||
if (!response || typeof response !== "object") {
|
||||
reject(new Error("Invalid response from Yomitan backend"));
|
||||
return;
|
||||
}
|
||||
if (response.error) {
|
||||
reject(new Error(response.error.message || "Yomitan backend error"));
|
||||
return;
|
||||
}
|
||||
resolve(response.result);
|
||||
});
|
||||
});
|
||||
${YOMITAN_SCANNING_HELPERS}
|
||||
const includeNameMatchMetadata = ${includeNameMatchMetadata ? 'true' : 'false'};
|
||||
const text = ${JSON.stringify(text)};
|
||||
const details = {matchType: "exact", deinflect: true};
|
||||
const tokens = [];
|
||||
let i = 0;
|
||||
while (i < text.length) {
|
||||
const codePoint = text.codePointAt(i);
|
||||
const character = String.fromCodePoint(codePoint);
|
||||
const substring = text.substring(i, i + ${scanLength});
|
||||
const result = await invoke("termsFind", { text: substring, details, optionsContext: { index: ${profileIndex} } });
|
||||
const dictionaryEntries = Array.isArray(result?.dictionaryEntries) ? result.dictionaryEntries : [];
|
||||
const originalTextLength = typeof result?.originalTextLength === "number" ? result.originalTextLength : 0;
|
||||
if (dictionaryEntries.length > 0 && originalTextLength > 0 && (originalTextLength !== character.length || isCodePointJapanese(codePoint))) {
|
||||
const source = substring.substring(0, originalTextLength);
|
||||
const preferredHeadword = getPreferredHeadword(dictionaryEntries, source);
|
||||
if (preferredHeadword && typeof preferredHeadword.term === "string") {
|
||||
const reading = typeof preferredHeadword.reading === "string" ? preferredHeadword.reading : "";
|
||||
const segments = distributeFuriganaInflected(preferredHeadword.term, reading, source);
|
||||
tokens.push({
|
||||
surface: segments.map((segment) => segment.text).join("") || source,
|
||||
reading: segments.map((segment) => typeof segment.reading === "string" ? segment.reading : "").join(""),
|
||||
headword: preferredHeadword.term,
|
||||
startPos: i,
|
||||
endPos: i + originalTextLength,
|
||||
isNameMatch: includeNameMatchMetadata && preferredHeadword.isNameMatch === true,
|
||||
});
|
||||
i += originalTextLength;
|
||||
continue;
|
||||
}
|
||||
}
|
||||
i += character.length;
|
||||
}
|
||||
return tokens;
|
||||
})();
|
||||
`;
|
||||
}
|
||||
|
||||
export async function requestYomitanParseResults(
|
||||
text: string,
|
||||
deps: YomitanParserRuntimeDeps,
|
||||
@@ -583,6 +1003,61 @@ export async function requestYomitanParseResults(
|
||||
}
|
||||
}
|
||||
|
||||
export async function requestYomitanScanTokens(
|
||||
text: string,
|
||||
deps: YomitanParserRuntimeDeps,
|
||||
logger: LoggerLike,
|
||||
options?: {
|
||||
includeNameMatchMetadata?: boolean;
|
||||
},
|
||||
): Promise<YomitanScanToken[] | null> {
|
||||
const yomitanExt = deps.getYomitanExt();
|
||||
if (!text || !yomitanExt) {
|
||||
return null;
|
||||
}
|
||||
|
||||
const isReady = await ensureYomitanParserWindow(deps, logger);
|
||||
const parserWindow = deps.getYomitanParserWindow();
|
||||
if (!isReady || !parserWindow || parserWindow.isDestroyed()) {
|
||||
return null;
|
||||
}
|
||||
|
||||
const metadata = await requestYomitanProfileMetadata(parserWindow, logger);
|
||||
const profileIndex = metadata?.profileIndex ?? 0;
|
||||
const scanLength = metadata?.scanLength ?? DEFAULT_YOMITAN_SCAN_LENGTH;
|
||||
|
||||
try {
|
||||
const rawResult = await parserWindow.webContents.executeJavaScript(
|
||||
buildYomitanScanningScript(
|
||||
text,
|
||||
profileIndex,
|
||||
scanLength,
|
||||
options?.includeNameMatchMetadata === true,
|
||||
),
|
||||
true,
|
||||
);
|
||||
if (isScanTokenArray(rawResult)) {
|
||||
return rawResult;
|
||||
}
|
||||
if (Array.isArray(rawResult)) {
|
||||
const selectedTokens = selectYomitanParseTokens(rawResult, () => false, 'headword');
|
||||
return (
|
||||
selectedTokens?.map((token) => ({
|
||||
surface: token.surface,
|
||||
reading: token.reading,
|
||||
headword: token.headword,
|
||||
startPos: token.startPos,
|
||||
endPos: token.endPos,
|
||||
})) ?? null
|
||||
);
|
||||
}
|
||||
return null;
|
||||
} catch (err) {
|
||||
logger.error('Yomitan scanner request failed:', (err as Error).message);
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
||||
async function fetchYomitanTermFrequencies(
|
||||
parserWindow: BrowserWindow,
|
||||
termReadingList: YomitanTermReadingPair[],
|
||||
@@ -963,3 +1438,325 @@ export async function syncYomitanDefaultAnkiServer(
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
function buildYomitanInvokeScript(actionLiteral: string, paramsLiteral: string): string {
|
||||
return `
|
||||
(async () => {
|
||||
const invoke = (action, params) =>
|
||||
new Promise((resolve, reject) => {
|
||||
chrome.runtime.sendMessage({ action, params }, (response) => {
|
||||
if (chrome.runtime.lastError) {
|
||||
reject(new Error(chrome.runtime.lastError.message));
|
||||
return;
|
||||
}
|
||||
if (!response || typeof response !== "object") {
|
||||
reject(new Error("Invalid response from Yomitan backend"));
|
||||
return;
|
||||
}
|
||||
if (response.error) {
|
||||
reject(new Error(response.error.message || "Yomitan backend error"));
|
||||
return;
|
||||
}
|
||||
resolve(response.result);
|
||||
});
|
||||
});
|
||||
|
||||
return await invoke(${actionLiteral}, ${paramsLiteral});
|
||||
})();
|
||||
`;
|
||||
}
|
||||
|
||||
async function invokeYomitanBackendAction<T>(
|
||||
action: string,
|
||||
params: unknown,
|
||||
deps: YomitanParserRuntimeDeps,
|
||||
logger: LoggerLike,
|
||||
): Promise<T | null> {
|
||||
const isReady = await ensureYomitanParserWindow(deps, logger);
|
||||
const parserWindow = deps.getYomitanParserWindow();
|
||||
if (!isReady || !parserWindow || parserWindow.isDestroyed()) {
|
||||
return null;
|
||||
}
|
||||
|
||||
const script = buildYomitanInvokeScript(
|
||||
JSON.stringify(action),
|
||||
params === undefined ? 'undefined' : JSON.stringify(params),
|
||||
);
|
||||
|
||||
try {
|
||||
return (await parserWindow.webContents.executeJavaScript(script, true)) as T;
|
||||
} catch (err) {
|
||||
logger.error(`Yomitan backend action failed (${action}):`, (err as Error).message);
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
||||
function createDefaultDictionarySettings(name: string, enabled: boolean): Record<string, unknown> {
|
||||
return {
|
||||
name,
|
||||
alias: name,
|
||||
enabled,
|
||||
allowSecondarySearches: false,
|
||||
definitionsCollapsible: 'not-collapsible',
|
||||
partsOfSpeechFilter: true,
|
||||
useDeinflections: true,
|
||||
styles: '',
|
||||
};
|
||||
}
|
||||
|
||||
function getTargetProfileIndices(
|
||||
optionsFull: Record<string, unknown>,
|
||||
profileScope: 'all' | 'active',
|
||||
): number[] {
|
||||
const profiles = Array.isArray(optionsFull.profiles) ? optionsFull.profiles : [];
|
||||
if (profileScope === 'active') {
|
||||
const profileCurrent =
|
||||
typeof optionsFull.profileCurrent === 'number' && Number.isFinite(optionsFull.profileCurrent)
|
||||
? Math.max(0, Math.floor(optionsFull.profileCurrent))
|
||||
: 0;
|
||||
return profileCurrent < profiles.length ? [profileCurrent] : [];
|
||||
}
|
||||
return profiles.map((_profile, index) => index);
|
||||
}
|
||||
|
||||
export async function getYomitanDictionaryInfo(
|
||||
deps: YomitanParserRuntimeDeps,
|
||||
logger: LoggerLike,
|
||||
): Promise<YomitanDictionaryInfo[]> {
|
||||
const result = await invokeYomitanBackendAction<unknown>(
|
||||
'getDictionaryInfo',
|
||||
undefined,
|
||||
deps,
|
||||
logger,
|
||||
);
|
||||
if (!Array.isArray(result)) {
|
||||
return [];
|
||||
}
|
||||
|
||||
return result
|
||||
.filter((entry): entry is Record<string, unknown> => isObject(entry))
|
||||
.map((entry) => {
|
||||
const title = typeof entry.title === 'string' ? entry.title.trim() : '';
|
||||
const revision = entry.revision;
|
||||
return {
|
||||
title,
|
||||
revision:
|
||||
typeof revision === 'string' || typeof revision === 'number' ? revision : undefined,
|
||||
};
|
||||
})
|
||||
.filter((entry) => entry.title.length > 0);
|
||||
}
|
||||
|
||||
export async function getYomitanSettingsFull(
|
||||
deps: YomitanParserRuntimeDeps,
|
||||
logger: LoggerLike,
|
||||
): Promise<Record<string, unknown> | null> {
|
||||
const result = await invokeYomitanBackendAction<unknown>(
|
||||
'optionsGetFull',
|
||||
undefined,
|
||||
deps,
|
||||
logger,
|
||||
);
|
||||
return isObject(result) ? result : null;
|
||||
}
|
||||
|
||||
export async function setYomitanSettingsFull(
|
||||
value: Record<string, unknown>,
|
||||
deps: YomitanParserRuntimeDeps,
|
||||
logger: LoggerLike,
|
||||
source = 'subminer',
|
||||
): Promise<boolean> {
|
||||
const result = await invokeYomitanBackendAction<unknown>(
|
||||
'setAllSettings',
|
||||
{ value, source },
|
||||
deps,
|
||||
logger,
|
||||
);
|
||||
return result !== null;
|
||||
}
|
||||
|
||||
export async function importYomitanDictionaryFromZip(
|
||||
zipPath: string,
|
||||
deps: YomitanParserRuntimeDeps,
|
||||
logger: LoggerLike,
|
||||
): Promise<boolean> {
|
||||
const normalizedZipPath = zipPath.trim();
|
||||
if (!normalizedZipPath || !fs.existsSync(normalizedZipPath)) {
|
||||
logger.error(`Dictionary ZIP not found: ${zipPath}`);
|
||||
return false;
|
||||
}
|
||||
|
||||
const archiveBase64 = fs.readFileSync(normalizedZipPath).toString('base64');
|
||||
const script = `
|
||||
(async () => {
|
||||
await globalThis.__subminerYomitanSettingsAutomation.importDictionaryArchiveBase64(
|
||||
${JSON.stringify(archiveBase64)},
|
||||
${JSON.stringify(path.basename(normalizedZipPath))}
|
||||
);
|
||||
return true;
|
||||
})();
|
||||
`;
|
||||
const result = await invokeYomitanSettingsAutomation<boolean>(script, deps, logger);
|
||||
return result === true;
|
||||
}
|
||||
|
||||
export async function deleteYomitanDictionaryByTitle(
|
||||
dictionaryTitle: string,
|
||||
deps: YomitanParserRuntimeDeps,
|
||||
logger: LoggerLike,
|
||||
): Promise<boolean> {
|
||||
const normalizedTitle = dictionaryTitle.trim();
|
||||
if (!normalizedTitle) {
|
||||
return false;
|
||||
}
|
||||
const result = await invokeYomitanSettingsAutomation<boolean>(
|
||||
`
|
||||
(async () => {
|
||||
await globalThis.__subminerYomitanSettingsAutomation.deleteDictionary(
|
||||
${JSON.stringify(normalizedTitle)}
|
||||
);
|
||||
return true;
|
||||
})();
|
||||
`,
|
||||
deps,
|
||||
logger,
|
||||
);
|
||||
return result === true;
|
||||
}
|
||||
|
||||
export async function upsertYomitanDictionarySettings(
|
||||
dictionaryTitle: string,
|
||||
profileScope: 'all' | 'active',
|
||||
deps: YomitanParserRuntimeDeps,
|
||||
logger: LoggerLike,
|
||||
): Promise<boolean> {
|
||||
const normalizedTitle = dictionaryTitle.trim();
|
||||
if (!normalizedTitle) {
|
||||
return false;
|
||||
}
|
||||
|
||||
const optionsFull = await getYomitanSettingsFull(deps, logger);
|
||||
if (!optionsFull) {
|
||||
return false;
|
||||
}
|
||||
|
||||
const profiles = Array.isArray(optionsFull.profiles) ? optionsFull.profiles : [];
|
||||
const indices = getTargetProfileIndices(optionsFull, profileScope);
|
||||
let changed = false;
|
||||
|
||||
for (const index of indices) {
|
||||
const profile = profiles[index];
|
||||
if (!isObject(profile)) {
|
||||
continue;
|
||||
}
|
||||
|
||||
if (!isObject(profile.options)) {
|
||||
profile.options = {};
|
||||
}
|
||||
const profileOptions = profile.options as Record<string, unknown>;
|
||||
if (!Array.isArray(profileOptions.dictionaries)) {
|
||||
profileOptions.dictionaries = [];
|
||||
}
|
||||
|
||||
const dictionaries = profileOptions.dictionaries as unknown[];
|
||||
const existingIndex = dictionaries.findIndex(
|
||||
(entry) =>
|
||||
isObject(entry) &&
|
||||
typeof (entry as { name?: unknown }).name === 'string' &&
|
||||
(entry as { name: string }).name.trim() === normalizedTitle,
|
||||
);
|
||||
|
||||
if (existingIndex >= 0) {
|
||||
const existing = dictionaries[existingIndex] as Record<string, unknown>;
|
||||
if (existing.enabled !== true) {
|
||||
existing.enabled = true;
|
||||
changed = true;
|
||||
}
|
||||
if (typeof existing.alias !== 'string' || existing.alias.trim().length === 0) {
|
||||
existing.alias = normalizedTitle;
|
||||
changed = true;
|
||||
}
|
||||
continue;
|
||||
}
|
||||
|
||||
dictionaries.push(createDefaultDictionarySettings(normalizedTitle, true));
|
||||
changed = true;
|
||||
}
|
||||
|
||||
if (!changed) {
|
||||
return false;
|
||||
}
|
||||
|
||||
return await setYomitanSettingsFull(optionsFull, deps, logger);
|
||||
}
|
||||
|
||||
export async function removeYomitanDictionarySettings(
|
||||
dictionaryTitle: string,
|
||||
profileScope: 'all' | 'active',
|
||||
mode: 'delete' | 'disable',
|
||||
deps: YomitanParserRuntimeDeps,
|
||||
logger: LoggerLike,
|
||||
): Promise<boolean> {
|
||||
const normalizedTitle = dictionaryTitle.trim();
|
||||
if (!normalizedTitle) {
|
||||
return false;
|
||||
}
|
||||
|
||||
const optionsFull = await getYomitanSettingsFull(deps, logger);
|
||||
if (!optionsFull) {
|
||||
return false;
|
||||
}
|
||||
|
||||
const profiles = Array.isArray(optionsFull.profiles) ? optionsFull.profiles : [];
|
||||
const indices = getTargetProfileIndices(optionsFull, profileScope);
|
||||
let changed = false;
|
||||
|
||||
for (const index of indices) {
|
||||
const profile = profiles[index];
|
||||
if (!isObject(profile) || !isObject(profile.options)) {
|
||||
continue;
|
||||
}
|
||||
const profileOptions = profile.options as Record<string, unknown>;
|
||||
if (!Array.isArray(profileOptions.dictionaries)) {
|
||||
continue;
|
||||
}
|
||||
|
||||
const dictionaries = profileOptions.dictionaries as unknown[];
|
||||
if (mode === 'delete') {
|
||||
const before = dictionaries.length;
|
||||
profileOptions.dictionaries = dictionaries.filter(
|
||||
(entry) =>
|
||||
!(
|
||||
isObject(entry) &&
|
||||
typeof (entry as { name?: unknown }).name === 'string' &&
|
||||
(entry as { name: string }).name.trim() === normalizedTitle
|
||||
),
|
||||
);
|
||||
if ((profileOptions.dictionaries as unknown[]).length !== before) {
|
||||
changed = true;
|
||||
}
|
||||
continue;
|
||||
}
|
||||
|
||||
for (const entry of dictionaries) {
|
||||
if (
|
||||
!isObject(entry) ||
|
||||
typeof (entry as { name?: unknown }).name !== 'string' ||
|
||||
(entry as { name: string }).name.trim() !== normalizedTitle
|
||||
) {
|
||||
continue;
|
||||
}
|
||||
const dictionaryEntry = entry as Record<string, unknown>;
|
||||
if (dictionaryEntry.enabled !== false) {
|
||||
dictionaryEntry.enabled = false;
|
||||
changed = true;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if (!changed) {
|
||||
return false;
|
||||
}
|
||||
|
||||
return await setYomitanSettingsFull(optionsFull, deps, logger);
|
||||
}
|
||||
|
||||
@@ -1,3 +1,4 @@
|
||||
import { createHash } from 'node:crypto';
|
||||
import * as fs from 'fs';
|
||||
import * as path from 'path';
|
||||
|
||||
@@ -17,6 +18,41 @@ function readManifestVersion(manifestPath: string): string | null {
|
||||
}
|
||||
}
|
||||
|
||||
export function hashDirectoryContents(dirPath: string): string | null {
|
||||
try {
|
||||
if (!fs.existsSync(dirPath) || !fs.statSync(dirPath).isDirectory()) {
|
||||
return null;
|
||||
}
|
||||
|
||||
const hash = createHash('sha256');
|
||||
const queue = [''];
|
||||
while (queue.length > 0) {
|
||||
const relativeDir = queue.shift()!;
|
||||
const absoluteDir = path.join(dirPath, relativeDir);
|
||||
const entries = fs.readdirSync(absoluteDir, { withFileTypes: true });
|
||||
entries.sort((a, b) => a.name.localeCompare(b.name));
|
||||
|
||||
for (const entry of entries) {
|
||||
const relativePath = path.join(relativeDir, entry.name);
|
||||
const normalizedRelativePath = relativePath.split(path.sep).join('/');
|
||||
hash.update(normalizedRelativePath);
|
||||
if (entry.isDirectory()) {
|
||||
queue.push(relativePath);
|
||||
continue;
|
||||
}
|
||||
if (!entry.isFile()) {
|
||||
continue;
|
||||
}
|
||||
hash.update(fs.readFileSync(path.join(dirPath, relativePath)));
|
||||
}
|
||||
}
|
||||
|
||||
return hash.digest('hex');
|
||||
} catch {
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
||||
function areFilesEqual(sourcePath: string, targetPath: string): boolean {
|
||||
if (!fs.existsSync(sourcePath) || !fs.existsSync(targetPath)) return false;
|
||||
try {
|
||||
@@ -49,5 +85,35 @@ export function shouldCopyYomitanExtension(sourceDir: string, targetDir: string)
|
||||
}
|
||||
}
|
||||
|
||||
return false;
|
||||
const sourceHash = hashDirectoryContents(sourceDir);
|
||||
const targetHash = hashDirectoryContents(targetDir);
|
||||
return sourceHash === null || targetHash === null || sourceHash !== targetHash;
|
||||
}
|
||||
|
||||
export function ensureExtensionCopy(
|
||||
sourceDir: string,
|
||||
userDataPath: string,
|
||||
): {
|
||||
targetDir: string;
|
||||
copied: boolean;
|
||||
} {
|
||||
if (process.platform === 'win32') {
|
||||
return { targetDir: sourceDir, copied: false };
|
||||
}
|
||||
|
||||
const extensionsRoot = path.join(userDataPath, 'extensions');
|
||||
const targetDir = path.join(extensionsRoot, 'yomitan');
|
||||
|
||||
let shouldCopy = !fs.existsSync(targetDir);
|
||||
if (!shouldCopy) {
|
||||
shouldCopy = hashDirectoryContents(sourceDir) !== hashDirectoryContents(targetDir);
|
||||
}
|
||||
|
||||
if (shouldCopy) {
|
||||
fs.mkdirSync(extensionsRoot, { recursive: true });
|
||||
fs.rmSync(targetDir, { recursive: true, force: true });
|
||||
fs.cpSync(sourceDir, targetDir, { recursive: true });
|
||||
}
|
||||
|
||||
return { targetDir, copied: shouldCopy };
|
||||
}
|
||||
|
||||
@@ -4,7 +4,11 @@ import os from 'node:os';
|
||||
import path from 'node:path';
|
||||
import test from 'node:test';
|
||||
|
||||
import { shouldCopyYomitanExtension } from './yomitan-extension-copy';
|
||||
import { ensureExtensionCopy, shouldCopyYomitanExtension } from './yomitan-extension-copy';
|
||||
|
||||
function makeTempDir(prefix: string): string {
|
||||
return fs.mkdtempSync(path.join(os.tmpdir(), prefix));
|
||||
}
|
||||
|
||||
function writeFile(filePath: string, content: string): void {
|
||||
fs.mkdirSync(path.dirname(filePath), { recursive: true });
|
||||
@@ -12,41 +16,69 @@ function writeFile(filePath: string, content: string): void {
|
||||
}
|
||||
|
||||
test('shouldCopyYomitanExtension detects popup runtime script drift', () => {
|
||||
const tempRoot = fs.mkdtempSync(path.join(os.tmpdir(), 'yomitan-copy-test-'));
|
||||
const tempRoot = makeTempDir('subminer-yomitan-copy-');
|
||||
const sourceDir = path.join(tempRoot, 'source');
|
||||
const targetDir = path.join(tempRoot, 'target');
|
||||
|
||||
writeFile(path.join(sourceDir, 'manifest.json'), JSON.stringify({ version: '1.0.0' }));
|
||||
writeFile(path.join(targetDir, 'manifest.json'), JSON.stringify({ version: '1.0.0' }));
|
||||
|
||||
writeFile(path.join(sourceDir, 'js', 'app', 'popup.js'), 'same-popup-script');
|
||||
writeFile(path.join(targetDir, 'js', 'app', 'popup.js'), 'same-popup-script');
|
||||
|
||||
writeFile(path.join(sourceDir, 'js', 'display', 'popup-main.js'), 'source-popup-main');
|
||||
writeFile(path.join(targetDir, 'js', 'display', 'popup-main.js'), 'target-popup-main');
|
||||
|
||||
assert.equal(shouldCopyYomitanExtension(sourceDir, targetDir), true);
|
||||
});
|
||||
|
||||
test('shouldCopyYomitanExtension skips copy when versions and watched scripts match', () => {
|
||||
const tempRoot = fs.mkdtempSync(path.join(os.tmpdir(), 'yomitan-copy-test-'));
|
||||
test('shouldCopyYomitanExtension skips copy when extension contents match', () => {
|
||||
const tempRoot = makeTempDir('subminer-yomitan-copy-');
|
||||
const sourceDir = path.join(tempRoot, 'source');
|
||||
const targetDir = path.join(tempRoot, 'target');
|
||||
|
||||
writeFile(path.join(sourceDir, 'manifest.json'), JSON.stringify({ version: '1.0.0' }));
|
||||
writeFile(path.join(targetDir, 'manifest.json'), JSON.stringify({ version: '1.0.0' }));
|
||||
|
||||
writeFile(path.join(sourceDir, 'js', 'app', 'popup.js'), 'same-popup-script');
|
||||
writeFile(path.join(targetDir, 'js', 'app', 'popup.js'), 'same-popup-script');
|
||||
|
||||
writeFile(path.join(sourceDir, 'js', 'display', 'popup-main.js'), 'same-popup-main');
|
||||
writeFile(path.join(targetDir, 'js', 'display', 'popup-main.js'), 'same-popup-main');
|
||||
|
||||
writeFile(path.join(sourceDir, 'js', 'display', 'display.js'), 'same-display');
|
||||
writeFile(path.join(targetDir, 'js', 'display', 'display.js'), 'same-display');
|
||||
|
||||
writeFile(path.join(sourceDir, 'js', 'display', 'display-audio.js'), 'same-display-audio');
|
||||
writeFile(path.join(targetDir, 'js', 'display', 'display-audio.js'), 'same-display-audio');
|
||||
|
||||
assert.equal(shouldCopyYomitanExtension(sourceDir, targetDir), false);
|
||||
});
|
||||
|
||||
test('ensureExtensionCopy refreshes copied extension when display files change', () => {
|
||||
const sourceRoot = makeTempDir('subminer-yomitan-src-');
|
||||
const userDataRoot = makeTempDir('subminer-yomitan-user-');
|
||||
|
||||
const sourceDir = path.join(sourceRoot, 'yomitan');
|
||||
const targetDir = path.join(userDataRoot, 'extensions', 'yomitan');
|
||||
|
||||
fs.mkdirSync(path.join(sourceDir, 'js', 'display'), { recursive: true });
|
||||
fs.mkdirSync(path.join(targetDir, 'js', 'display'), { recursive: true });
|
||||
|
||||
fs.writeFileSync(path.join(sourceDir, 'manifest.json'), JSON.stringify({ version: '1.0.0' }));
|
||||
fs.writeFileSync(path.join(targetDir, 'manifest.json'), JSON.stringify({ version: '1.0.0' }));
|
||||
fs.writeFileSync(
|
||||
path.join(sourceDir, 'js', 'display', 'structured-content-generator.js'),
|
||||
'new display code',
|
||||
);
|
||||
fs.writeFileSync(
|
||||
path.join(targetDir, 'js', 'display', 'structured-content-generator.js'),
|
||||
'old display code',
|
||||
);
|
||||
|
||||
const result = ensureExtensionCopy(sourceDir, userDataRoot);
|
||||
|
||||
assert.equal(result.targetDir, targetDir);
|
||||
assert.equal(result.copied, true);
|
||||
assert.equal(
|
||||
fs.readFileSync(
|
||||
path.join(targetDir, 'js', 'display', 'structured-content-generator.js'),
|
||||
'utf8',
|
||||
),
|
||||
'new display code',
|
||||
);
|
||||
});
|
||||
|
||||
@@ -1,13 +1,17 @@
|
||||
import { BrowserWindow, Extension, session } from 'electron';
|
||||
import * as fs from 'fs';
|
||||
import * as path from 'path';
|
||||
import { createLogger } from '../../logger';
|
||||
import { shouldCopyYomitanExtension } from './yomitan-extension-copy';
|
||||
import { ensureExtensionCopy } from './yomitan-extension-copy';
|
||||
import {
|
||||
getYomitanExtensionSearchPaths,
|
||||
resolveExistingYomitanExtensionPath,
|
||||
} from './yomitan-extension-paths';
|
||||
|
||||
const logger = createLogger('main:yomitan-extension-loader');
|
||||
|
||||
export interface YomitanExtensionLoaderDeps {
|
||||
userDataPath: string;
|
||||
extensionPath?: string;
|
||||
getYomitanParserWindow: () => BrowserWindow | null;
|
||||
setYomitanParserWindow: (window: BrowserWindow | null) => void;
|
||||
setYomitanParserReadyPromise: (promise: Promise<void> | null) => void;
|
||||
@@ -15,52 +19,28 @@ export interface YomitanExtensionLoaderDeps {
|
||||
setYomitanExtension: (extension: Extension | null) => void;
|
||||
}
|
||||
|
||||
function ensureExtensionCopy(sourceDir: string, userDataPath: string): string {
|
||||
if (process.platform === 'win32') {
|
||||
return sourceDir;
|
||||
}
|
||||
|
||||
const extensionsRoot = path.join(userDataPath, 'extensions');
|
||||
const targetDir = path.join(extensionsRoot, 'yomitan');
|
||||
|
||||
const shouldCopy = shouldCopyYomitanExtension(sourceDir, targetDir);
|
||||
|
||||
if (shouldCopy) {
|
||||
fs.mkdirSync(extensionsRoot, { recursive: true });
|
||||
fs.rmSync(targetDir, { recursive: true, force: true });
|
||||
fs.cpSync(sourceDir, targetDir, { recursive: true });
|
||||
logger.info(`Copied yomitan extension to ${targetDir}`);
|
||||
}
|
||||
|
||||
return targetDir;
|
||||
}
|
||||
|
||||
export async function loadYomitanExtension(
|
||||
deps: YomitanExtensionLoaderDeps,
|
||||
): Promise<Extension | null> {
|
||||
const searchPaths = [
|
||||
path.join(__dirname, '..', '..', 'vendor', 'yomitan'),
|
||||
path.join(__dirname, '..', '..', '..', 'vendor', 'yomitan'),
|
||||
path.join(process.resourcesPath, 'yomitan'),
|
||||
'/usr/share/SubMiner/yomitan',
|
||||
path.join(deps.userDataPath, 'yomitan'),
|
||||
];
|
||||
|
||||
let extPath: string | null = null;
|
||||
for (const p of searchPaths) {
|
||||
if (fs.existsSync(p)) {
|
||||
extPath = p;
|
||||
break;
|
||||
}
|
||||
}
|
||||
const searchPaths = getYomitanExtensionSearchPaths({
|
||||
explicitPath: deps.extensionPath,
|
||||
moduleDir: __dirname,
|
||||
resourcesPath: process.resourcesPath,
|
||||
userDataPath: deps.userDataPath,
|
||||
});
|
||||
let extPath = resolveExistingYomitanExtensionPath(searchPaths, fs.existsSync);
|
||||
|
||||
if (!extPath) {
|
||||
logger.error('Yomitan extension not found in any search path');
|
||||
logger.error('Install Yomitan to one of:', searchPaths);
|
||||
logger.error('Run `bun run build:yomitan` or install Yomitan to one of:', searchPaths);
|
||||
return null;
|
||||
}
|
||||
|
||||
extPath = ensureExtensionCopy(extPath, deps.userDataPath);
|
||||
const extensionCopy = ensureExtensionCopy(extPath, deps.userDataPath);
|
||||
if (extensionCopy.copied) {
|
||||
logger.info(`Copied yomitan extension to ${extensionCopy.targetDir}`);
|
||||
}
|
||||
extPath = extensionCopy.targetDir;
|
||||
|
||||
const parserWindow = deps.getYomitanParserWindow();
|
||||
if (parserWindow && !parserWindow.isDestroyed()) {
|
||||
|
||||
50
src/core/services/yomitan-extension-paths.test.ts
Normal file
50
src/core/services/yomitan-extension-paths.test.ts
Normal file
@@ -0,0 +1,50 @@
|
||||
import assert from 'node:assert/strict';
|
||||
import path from 'node:path';
|
||||
import test from 'node:test';
|
||||
|
||||
import {
|
||||
getYomitanExtensionSearchPaths,
|
||||
resolveExistingYomitanExtensionPath,
|
||||
} from './yomitan-extension-paths';
|
||||
|
||||
test('getYomitanExtensionSearchPaths prioritizes generated build output before packaged fallbacks', () => {
|
||||
const searchPaths = getYomitanExtensionSearchPaths({
|
||||
cwd: '/repo',
|
||||
moduleDir: '/repo/dist/core/services',
|
||||
resourcesPath: '/opt/SubMiner/resources',
|
||||
userDataPath: '/Users/kyle/.config/SubMiner',
|
||||
});
|
||||
|
||||
assert.deepEqual(searchPaths, [
|
||||
path.join('/repo', 'build', 'yomitan'),
|
||||
path.join('/opt/SubMiner/resources', 'yomitan'),
|
||||
'/usr/share/SubMiner/yomitan',
|
||||
path.join('/Users/kyle/.config/SubMiner', 'yomitan'),
|
||||
]);
|
||||
});
|
||||
|
||||
test('resolveExistingYomitanExtensionPath returns first manifest-backed candidate', () => {
|
||||
const existing = new Set<string>([
|
||||
path.join('/repo', 'build', 'yomitan', 'manifest.json'),
|
||||
path.join('/repo', 'vendor', 'subminer-yomitan', 'ext', 'manifest.json'),
|
||||
]);
|
||||
|
||||
const resolved = resolveExistingYomitanExtensionPath(
|
||||
[
|
||||
path.join('/repo', 'build', 'yomitan'),
|
||||
path.join('/repo', 'vendor', 'subminer-yomitan', 'ext'),
|
||||
],
|
||||
(candidate) => existing.has(candidate),
|
||||
);
|
||||
|
||||
assert.equal(resolved, path.join('/repo', 'build', 'yomitan'));
|
||||
});
|
||||
|
||||
test('resolveExistingYomitanExtensionPath ignores source tree without built manifest', () => {
|
||||
const resolved = resolveExistingYomitanExtensionPath(
|
||||
[path.join('/repo', 'vendor', 'subminer-yomitan', 'ext')],
|
||||
() => false,
|
||||
);
|
||||
|
||||
assert.equal(resolved, null);
|
||||
});
|
||||
60
src/core/services/yomitan-extension-paths.ts
Normal file
60
src/core/services/yomitan-extension-paths.ts
Normal file
@@ -0,0 +1,60 @@
|
||||
import * as fs from 'node:fs';
|
||||
import * as path from 'node:path';
|
||||
|
||||
export interface YomitanExtensionPathOptions {
|
||||
explicitPath?: string;
|
||||
cwd?: string;
|
||||
moduleDir?: string;
|
||||
resourcesPath?: string;
|
||||
userDataPath?: string;
|
||||
}
|
||||
|
||||
function pushUnique(values: string[], candidate: string | null | undefined): void {
|
||||
if (!candidate || values.includes(candidate)) {
|
||||
return;
|
||||
}
|
||||
values.push(candidate);
|
||||
}
|
||||
|
||||
export function getYomitanExtensionSearchPaths(
|
||||
options: YomitanExtensionPathOptions = {},
|
||||
): string[] {
|
||||
const searchPaths: string[] = [];
|
||||
|
||||
pushUnique(searchPaths, options.explicitPath ? path.resolve(options.explicitPath) : null);
|
||||
pushUnique(searchPaths, options.cwd ? path.resolve(options.cwd, 'build', 'yomitan') : null);
|
||||
pushUnique(
|
||||
searchPaths,
|
||||
options.moduleDir
|
||||
? path.resolve(options.moduleDir, '..', '..', '..', 'build', 'yomitan')
|
||||
: null,
|
||||
);
|
||||
pushUnique(
|
||||
searchPaths,
|
||||
options.resourcesPath ? path.join(options.resourcesPath, 'yomitan') : null,
|
||||
);
|
||||
pushUnique(searchPaths, '/usr/share/SubMiner/yomitan');
|
||||
pushUnique(searchPaths, options.userDataPath ? path.join(options.userDataPath, 'yomitan') : null);
|
||||
|
||||
return searchPaths;
|
||||
}
|
||||
|
||||
export function resolveExistingYomitanExtensionPath(
|
||||
searchPaths: string[],
|
||||
existsSync: (path: string) => boolean = fs.existsSync,
|
||||
): string | null {
|
||||
for (const candidate of searchPaths) {
|
||||
if (existsSync(path.join(candidate, 'manifest.json'))) {
|
||||
return candidate;
|
||||
}
|
||||
}
|
||||
|
||||
return null;
|
||||
}
|
||||
|
||||
export function resolveYomitanExtensionPath(
|
||||
options: YomitanExtensionPathOptions = {},
|
||||
existsSync: (path: string) => boolean = fs.existsSync,
|
||||
): string | null {
|
||||
return resolveExistingYomitanExtensionPath(getYomitanExtensionSearchPaths(options), existsSync);
|
||||
}
|
||||
235
src/core/services/yomitan-structured-content-generator.test.ts
Normal file
235
src/core/services/yomitan-structured-content-generator.test.ts
Normal file
@@ -0,0 +1,235 @@
|
||||
import assert from 'node:assert/strict';
|
||||
import path from 'node:path';
|
||||
import test from 'node:test';
|
||||
import { pathToFileURL } from 'node:url';
|
||||
import { resolveYomitanExtensionPath } from './yomitan-extension-paths';
|
||||
|
||||
class FakeStyle {
|
||||
private values = new Map<string, string>();
|
||||
|
||||
set width(value: string) {
|
||||
this.values.set('width', value);
|
||||
}
|
||||
|
||||
get width(): string {
|
||||
return this.values.get('width') ?? '';
|
||||
}
|
||||
|
||||
set height(value: string) {
|
||||
this.values.set('height', value);
|
||||
}
|
||||
|
||||
get height(): string {
|
||||
return this.values.get('height') ?? '';
|
||||
}
|
||||
|
||||
set border(value: string) {
|
||||
this.values.set('border', value);
|
||||
}
|
||||
|
||||
set borderRadius(value: string) {
|
||||
this.values.set('borderRadius', value);
|
||||
}
|
||||
|
||||
set paddingTop(value: string) {
|
||||
this.values.set('paddingTop', value);
|
||||
}
|
||||
|
||||
setProperty(name: string, value: string): void {
|
||||
this.values.set(name, value);
|
||||
}
|
||||
|
||||
removeProperty(name: string): void {
|
||||
this.values.delete(name);
|
||||
}
|
||||
}
|
||||
|
||||
class FakeNode {
|
||||
public childNodes: Array<FakeNode | FakeTextNode> = [];
|
||||
public className = '';
|
||||
public dataset: Record<string, string> = {};
|
||||
public style = new FakeStyle();
|
||||
public textContent: string | null = null;
|
||||
public title = '';
|
||||
public href = '';
|
||||
public rel = '';
|
||||
public target = '';
|
||||
public width = 0;
|
||||
public height = 0;
|
||||
public parentNode: FakeNode | null = null;
|
||||
|
||||
constructor(public readonly tagName: string) {}
|
||||
|
||||
appendChild(node: FakeNode | FakeTextNode): FakeNode | FakeTextNode {
|
||||
if (node instanceof FakeNode) {
|
||||
node.parentNode = this;
|
||||
}
|
||||
this.childNodes.push(node);
|
||||
return node;
|
||||
}
|
||||
|
||||
addEventListener(): void {}
|
||||
|
||||
closest(selector: string): FakeNode | null {
|
||||
if (!selector.startsWith('.')) {
|
||||
return null;
|
||||
}
|
||||
const className = selector.slice(1);
|
||||
let current: FakeNode | null = this;
|
||||
while (current) {
|
||||
if (current.className === className) {
|
||||
return current;
|
||||
}
|
||||
current = current.parentNode;
|
||||
}
|
||||
return null;
|
||||
}
|
||||
|
||||
removeAttribute(name: string): void {
|
||||
if (name === 'src') {
|
||||
return;
|
||||
}
|
||||
if (name === 'href') {
|
||||
this.href = '';
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
class FakeImageElement extends FakeNode {
|
||||
public onload: (() => void) | null = null;
|
||||
public onerror: ((error: unknown) => void) | null = null;
|
||||
private _src = '';
|
||||
|
||||
constructor() {
|
||||
super('img');
|
||||
}
|
||||
|
||||
set src(value: string) {
|
||||
this._src = value;
|
||||
this.onload?.();
|
||||
}
|
||||
|
||||
get src(): string {
|
||||
return this._src;
|
||||
}
|
||||
}
|
||||
|
||||
/** canvas stand-in; only its tag name is relevant to the code under test. */
class FakeCanvasElement extends FakeNode {
    constructor() {
        super('canvas');
    }
}
|
||||
|
||||
/** Text-node stand-in storing only its character data. */
class FakeTextNode {
    constructor(public readonly data: string) {}
}
|
||||
|
||||
class FakeDocument {
|
||||
createElement(tagName: string): FakeNode {
|
||||
if (tagName === 'img') {
|
||||
return new FakeImageElement();
|
||||
}
|
||||
if (tagName === 'canvas') {
|
||||
return new FakeCanvasElement();
|
||||
}
|
||||
return new FakeNode(tagName);
|
||||
}
|
||||
|
||||
createTextNode(data: string): FakeTextNode {
|
||||
return new FakeTextNode(data);
|
||||
}
|
||||
}
|
||||
|
||||
function findFirstByClass(node: FakeNode, className: string): FakeNode | null {
|
||||
if (node.className === className) {
|
||||
return node;
|
||||
}
|
||||
for (const child of node.childNodes) {
|
||||
if (child instanceof FakeNode) {
|
||||
const result = findFirstByClass(child, className);
|
||||
if (result) {
|
||||
return result;
|
||||
}
|
||||
}
|
||||
}
|
||||
return null;
|
||||
}
|
||||
|
||||
// Integration test: loads the built Yomitan display modules from disk and
// verifies that glossary images in the popup are rendered via a direct <img>
// whose src is a blob URL, and that blob URLs are revoked on unload.
// NOTE(review): relies on the fake DOM classes above and on a prior
// `bun run build:yomitan`; ordering of the monkey-patching below matters.
test('StructuredContentGenerator uses direct img loading for popup glossary images', async () => {
    const yomitanRoot = resolveYomitanExtensionPath({ cwd: process.cwd() });
    assert.ok(yomitanRoot, 'Run `bun run build:yomitan` before Yomitan integration tests.');

    // Import the built extension modules directly from the resolved build tree.
    const { DisplayContentManager } = await import(
        pathToFileURL(path.join(yomitanRoot, 'js', 'display', 'display-content-manager.js')).href
    );
    const { StructuredContentGenerator } = await import(
        pathToFileURL(path.join(yomitanRoot, 'js', 'display', 'structured-content-generator.js')).href
    );

    // Record blob URL lifecycle while globals are patched with the fakes;
    // originals are restored in the finally block below.
    const createObjectURLCalls: string[] = [];
    const revokeObjectURLCalls: string[] = [];
    const originalHtmlImageElement = globalThis.HTMLImageElement;
    const originalHtmlCanvasElement = globalThis.HTMLCanvasElement;
    const originalCreateObjectURL = URL.createObjectURL;
    const originalRevokeObjectURL = URL.revokeObjectURL;
    globalThis.HTMLImageElement = FakeImageElement as unknown as typeof HTMLImageElement;
    globalThis.HTMLCanvasElement = FakeCanvasElement as unknown as typeof HTMLCanvasElement;
    URL.createObjectURL = (_blob: Blob) => {
        const value = 'blob:test-image';
        createObjectURLCalls.push(value);
        return value;
    };
    URL.revokeObjectURL = (value: string) => {
        revokeObjectURLCalls.push(value);
    };

    try {
        // Manager backed by a stub API that serves one base64-encoded PNG.
        const manager = new DisplayContentManager({
            application: {
                api: {
                    getMedia: async () => [
                        {
                            content: Buffer.from('png-bytes').toString('base64'),
                            mediaType: 'image/png',
                        },
                    ],
                },
            },
        });

        const generator = new StructuredContentGenerator(manager, new FakeDocument(), {
            devicePixelRatio: 1,
            navigator: { userAgent: 'Mozilla/5.0' },
        });

        // Render a definition image node, then flush the queued media request.
        const node = generator.createDefinitionImage(
            {
                tag: 'img',
                path: 'img/test.png',
                width: 8,
                height: 11,
                title: 'Alpha',
                background: true,
            },
            'SubMiner Character Dictionary',
        ) as FakeNode;

        await manager.executeMediaRequests();

        // The glossary image must be a real <img> whose src is the blob URL,
        // and the container must be flagged as successfully loaded.
        const imageNode = findFirstByClass(node, 'gloss-image');
        assert.ok(imageNode);
        assert.equal(imageNode.tagName, 'img');
        assert.equal((imageNode as FakeImageElement).src, 'blob:test-image');
        assert.equal(node.dataset.imageLoadState, 'loaded');
        assert.equal(node.dataset.hasImage, 'true');
        assert.deepEqual(createObjectURLCalls, ['blob:test-image']);

        // Unloading must revoke every blob URL the manager created.
        manager.unloadAll();
        assert.deepEqual(revokeObjectURLCalls, ['blob:test-image']);
    } finally {
        globalThis.HTMLImageElement = originalHtmlImageElement;
        globalThis.HTMLCanvasElement = originalHtmlCanvasElement;
        URL.createObjectURL = originalCreateObjectURL;
        URL.revokeObjectURL = originalRevokeObjectURL;
    }
});
|
||||
Reference in New Issue
Block a user