feat: integrate n+1 target highlighting

- Merge feature branch changes for n+1 target-only highlight flow

- Extend merged token model and token-merger to mark exactly-one unknown targets

- Thread n+1 candidate metadata through tokenizer and config systems

- Update subtitle renderer/state to route configured colors and new token class

- Resolve merge conflicts in core service tests, including subtitle and subsync behavior
This commit is contained in:
2026-02-15 02:36:48 -08:00
parent 88099e2ffa
commit 3a27c026b6
16 changed files with 494 additions and 66 deletions

View File

@@ -186,6 +186,63 @@ test("accepts valid ankiConnect n+1 match mode values", () => {
assert.equal(config.ankiConnect.nPlusOne.matchMode, "surface");
});
test("validates ankiConnect n+1 color values", () => {
  const tempDir = makeTempDir();
  // Both color fields are invalid: one is a non-color string, one is a number.
  const invalidColorsJsonc = `{
  "ankiConnect": {
    "nPlusOne": {
      "nPlusOne": "not-a-color",
      "knownWord": 123
    }
  }
}`;
  fs.writeFileSync(path.join(tempDir, "config.jsonc"), invalidColorsJsonc, "utf-8");
  const service = new ConfigService(tempDir);
  const config = service.getConfig();
  const warnings = service.getWarnings();
  // Invalid values must fall back to the defaults...
  assert.equal(
    config.ankiConnect.nPlusOne.nPlusOne,
    DEFAULT_CONFIG.ankiConnect.nPlusOne.nPlusOne,
  );
  assert.equal(
    config.ankiConnect.nPlusOne.knownWord,
    DEFAULT_CONFIG.ankiConnect.nPlusOne.knownWord,
  );
  // ...and each rejected path must surface a warning.
  const expectedWarningPaths = [
    "ankiConnect.nPlusOne.nPlusOne",
    "ankiConnect.nPlusOne.knownWord",
  ];
  for (const expectedPath of expectedWarningPaths) {
    assert.ok(warnings.some((warning) => warning.path === expectedPath));
  }
});
test("accepts valid ankiConnect n+1 color values", () => {
  const tempDir = makeTempDir();
  // Valid hex colors should be accepted verbatim.
  const validColorsJsonc = `{
  "ankiConnect": {
    "nPlusOne": {
      "nPlusOne": "#c6a0f6",
      "knownWord": "#a6da95"
    }
  }
}`;
  fs.writeFileSync(path.join(tempDir, "config.jsonc"), validColorsJsonc, "utf-8");
  const resolved = new ConfigService(tempDir).getConfig();
  assert.equal(resolved.ankiConnect.nPlusOne.nPlusOne, "#c6a0f6");
  assert.equal(resolved.ankiConnect.nPlusOne.knownWord, "#a6da95");
});
test("supports legacy ankiConnect.behavior N+1 settings as fallback", () => {
const dir = makeTempDir();
fs.writeFileSync(
@@ -268,5 +325,8 @@ test("template generator includes known keys", () => {
assert.match(output, /"ankiConnect":/);
assert.match(output, /"websocket":/);
assert.match(output, /"youtubeSubgen":/);
assert.match(output, /"nPlusOne"\s*:\s*\{/);
assert.match(output, /"nPlusOne": "#c6a0f6"/);
assert.match(output, /"knownWord": "#a6da95"/);
assert.match(output, /auto-generated from src\/config\/definitions.ts/);
});

View File

@@ -128,6 +128,8 @@ export const DEFAULT_CONFIG: ResolvedConfig = {
refreshMinutes: 1440,
matchMode: "headword",
decks: [],
nPlusOne: "#c6a0f6",
knownWord: "#a6da95",
},
metadata: {
pattern: "[SubMiner] %f (%t)",
@@ -179,6 +181,8 @@ export const DEFAULT_CONFIG: ResolvedConfig = {
fontWeight: "normal",
fontStyle: "normal",
backgroundColor: "rgba(54, 58, 79, 0.5)",
nPlusOneColor: "#c6a0f6",
knownWordColor: "#a6da95",
secondary: {
fontSize: 24,
fontColor: "#ffffff",
@@ -321,6 +325,18 @@ export const CONFIG_OPTION_REGISTRY: ConfigOptionRegistryEntry[] = [
description:
"Decks used for N+1 known-word cache scope. Supports one or more deck names.",
},
{
path: "ankiConnect.nPlusOne.nPlusOne",
kind: "string",
defaultValue: DEFAULT_CONFIG.ankiConnect.nPlusOne.nPlusOne,
description: "Color used for the single N+1 target token highlight.",
},
{
path: "ankiConnect.nPlusOne.knownWord",
kind: "string",
defaultValue: DEFAULT_CONFIG.ankiConnect.nPlusOne.knownWord,
description: "Color used for legacy known-word highlights.",
},
{
path: "ankiConnect.isKiku.fieldGrouping",
kind: "enum",

View File

@@ -36,6 +36,15 @@ function asBoolean(value: unknown): boolean | undefined {
return typeof value === "boolean" ? value : undefined;
}
// Accepts #RGB, #RGBA, #RRGGBB, and #RRGGBBAA hex colors, case-insensitive.
const hexColorPattern = /^#(?:[0-9a-f]{3,4}|[0-9a-f]{6}|[0-9a-f]{8})$/i;

/**
 * Normalizes an arbitrary config value to a hex color string.
 *
 * @param value - Raw value read from the user's config file.
 * @returns The trimmed color when `value` is a string matching
 *   `hexColorPattern`; otherwise `undefined`.
 */
function asColor(value: unknown): string | undefined {
  if (typeof value !== "string") {
    return undefined;
  }
  const trimmed = value.trim();
  if (!hexColorPattern.test(trimmed)) {
    return undefined;
  }
  return trimmed;
}
export class ConfigService {
private readonly configDir: string;
private readonly configFileJsonc: string;
@@ -751,6 +760,34 @@ export class ConfigService {
resolved.ankiConnect.nPlusOne.decks = [];
}
const nPlusOneHighlightColor = asColor(nPlusOneConfig.nPlusOne);
if (nPlusOneHighlightColor !== undefined) {
resolved.ankiConnect.nPlusOne.nPlusOne = nPlusOneHighlightColor;
} else if (nPlusOneConfig.nPlusOne !== undefined) {
warn(
"ankiConnect.nPlusOne.nPlusOne",
nPlusOneConfig.nPlusOne,
resolved.ankiConnect.nPlusOne.nPlusOne,
"Expected a hex color value.",
);
resolved.ankiConnect.nPlusOne.nPlusOne =
DEFAULT_CONFIG.ankiConnect.nPlusOne.nPlusOne;
}
const nPlusOneKnownWordColor = asColor(nPlusOneConfig.knownWord);
if (nPlusOneKnownWordColor !== undefined) {
resolved.ankiConnect.nPlusOne.knownWord = nPlusOneKnownWordColor;
} else if (nPlusOneConfig.knownWord !== undefined) {
warn(
"ankiConnect.nPlusOne.knownWord",
nPlusOneConfig.knownWord,
resolved.ankiConnect.nPlusOne.knownWord,
"Expected a hex color value.",
);
resolved.ankiConnect.nPlusOne.knownWord =
DEFAULT_CONFIG.ankiConnect.nPlusOne.knownWord;
}
if (
resolved.ankiConnect.isKiku.fieldGrouping !== "auto" &&
resolved.ankiConnect.isKiku.fieldGrouping !== "manual" &&

View File

@@ -12,17 +12,17 @@ test("sendToVisibleOverlayRuntimeService restores visibility flag when opening h
let visibleOverlayVisible = false;
const ok = sendToVisibleOverlayRuntimeService({
mainWindow: {
isDestroyed: () => false,
webContents: {
isLoading: () => false,
send: (...args: unknown[]) => {
sent.push(args);
},
mainWindow: {
isDestroyed: () => false,
webContents: {
isLoading: () => false,
send: (...args: unknown[]) => {
sent.push(args);
},
} as unknown as Electron.BrowserWindow,
},
} as unknown as Electron.BrowserWindow,
visibleOverlayVisible,
setVisibleOverlayVisible: (visible) => {
setVisibleOverlayVisible: (visible: boolean) => {
visibleOverlayVisible = visible;
},
channel: "runtime-options:open",

View File

@@ -313,7 +313,7 @@ test("runSubsyncManualService resolves string sid values from mpv stream propert
writeExecutableScript(alassPath, "#!/bin/sh\nexit 0\n");
writeExecutableScript(
ffsubsyncPath,
`#!/bin/sh\n: > "${ffsubsyncLogPath}"\nfor arg in "$@"; do\n printf '%s\\n' "$arg" >> "${ffsubsyncLogPath}"\ndone\nprev=""\nfor arg in "$@"; do\n if [ "$prev" = "-o" ]; then\n : > "$arg"\n fi\n prev="$arg"\ndone`,
`#!/bin/sh\nmkdir -p "${tmpDir}"\n: > "${ffsubsyncLogPath}"\nfor arg in "$@"; do printf '%s\\n' "$arg" >> "${ffsubsyncLogPath}"; done\nprev=""\nout=""\nfor arg in "$@"; do\n if [ "$prev" = "--reference-stream" ]; then :; fi\n if [ "$prev" = "-o" ]; then out="$arg"; fi\n prev="$arg"\ndone\nif [ -n "$out" ]; then : > "$out"; fi`,
);
const deps = makeDeps({
@@ -354,9 +354,11 @@ test("runSubsyncManualService resolves string sid values from mpv stream propert
assert.equal(result.ok, true);
assert.equal(result.message, "Subtitle synchronized with ffsubsync");
const ffsubsyncArgs = fs.readFileSync(ffsubsyncLogPath, "utf8").trim().split("\n");
const outputIndex = ffsubsyncArgs.findIndex((value) => value === "-o");
assert.ok(outputIndex >= 0);
const outputPath = ffsubsyncArgs[outputIndex + 1];
const ffArgs = fs.readFileSync(ffsubsyncLogPath, "utf8").trim().split("\n");
const syncOutputIndex = ffArgs.indexOf("-o");
assert.equal(syncOutputIndex >= 0, true);
const outputPath = ffArgs[syncOutputIndex + 1];
assert.equal(typeof outputPath, "string");
assert.ok(outputPath.length > 0);
assert.equal(fs.readFileSync(outputPath, "utf8"), "");
});

View File

@@ -69,6 +69,7 @@ test("tokenizeSubtitleService normalizes newlines before mecab fallback", async
partOfSpeech: PartOfSpeech.other,
isMerged: true,
isKnown: false,
isNPlusOneTarget: false,
},
];
},
@@ -94,6 +95,7 @@ test("tokenizeSubtitleService falls back to mecab tokens when available", async
partOfSpeech: PartOfSpeech.noun,
isMerged: false,
isKnown: false,
isNPlusOneTarget: false,
},
],
}),
@@ -157,6 +159,7 @@ test("tokenizeSubtitleService uses Yomitan parser result when available", async
assert.equal(result.tokens?.[0]?.surface, "猫です");
assert.equal(result.tokens?.[0]?.reading, "ねこです");
assert.equal(result.tokens?.[0]?.isKnown, false);
assert.equal(result.tokens?.[0]?.isNPlusOneTarget, true);
});
test("tokenizeSubtitleService marks tokens as known using callback", async () => {
@@ -185,6 +188,125 @@ test("tokenizeSubtitleService marks tokens as known using callback", async () =>
assert.equal(result.tokens?.[0]?.isKnown, true);
});
test("tokenizeSubtitleService selects one N+1 target token", async () => {
  // Local factory for single-character mecab nouns (headword === surface).
  const noun = (
    surface: string,
    reading: string,
    startPos: number,
    isKnown: boolean,
  ) => ({
    surface,
    reading,
    headword: surface,
    startPos,
    endPos: startPos + 1,
    partOfSpeech: PartOfSpeech.noun,
    isMerged: false,
    isKnown,
    isNPlusOneTarget: false,
  });
  const result = await tokenizeSubtitleService(
    "猫です",
    makeDeps({
      tokenizeWithMecab: async () => [
        noun("私", "ワタシ", 0, true),
        noun("犬", "イヌ", 1, false),
      ],
    }),
  );
  // Exactly one unknown token in the sentence → it becomes the target.
  const targets = result.tokens?.filter((token) => token.isNPlusOneTarget) ?? [];
  assert.equal(targets.length, 1);
  assert.equal(targets[0]?.surface, "犬");
});
test("tokenizeSubtitleService does not mark target when sentence has multiple candidates", async () => {
  // Local factory for unknown single-character mecab nouns.
  const unknownNoun = (surface: string, reading: string, startPos: number) => ({
    surface,
    reading,
    headword: surface,
    startPos,
    endPos: startPos + 1,
    partOfSpeech: PartOfSpeech.noun,
    isMerged: false,
    isKnown: false,
    isNPlusOneTarget: false,
  });
  const result = await tokenizeSubtitleService(
    "猫犬",
    makeDeps({
      tokenizeWithMecab: async () => [
        unknownNoun("猫", "ネコ", 0),
        unknownNoun("犬", "イヌ", 1),
      ],
    }),
  );
  // Two unknown tokens → not an N+1 sentence, so nothing is marked.
  assert.equal(
    result.tokens?.some((token) => token.isNPlusOneTarget),
    false,
  );
});
test("tokenizeSubtitleService applies N+1 target marking to Yomitan results", async () => {
  // Local factory for Yomitan parse segments (term === text).
  const segment = (text: string, reading: string) => ({
    text,
    reading,
    headwords: [[{ term: text }]],
  });
  const parserWindow = {
    isDestroyed: () => false,
    webContents: {
      executeJavaScript: async () => [
        {
          source: "scanning-parser",
          index: 0,
          content: [[segment("猫", "ねこ")], [segment("です", "です")]],
        },
      ],
    },
  } as unknown as Electron.BrowserWindow;
  const result = await tokenizeSubtitleService(
    "猫です",
    makeDeps({
      getYomitanExt: () => ({ id: "dummy-ext" } as any),
      getYomitanParserWindow: () => parserWindow,
      // Force the Yomitan path by making mecab unavailable.
      tokenizeWithMecab: async () => null,
      isKnownWord: (text) => text === "です",
    }),
  );
  assert.equal(result.text, "猫です");
  assert.equal(result.tokens?.length, 2);
  assert.equal(result.tokens?.[0]?.surface, "猫");
  // "猫" is the single unknown token → marked; "です" is known → not marked.
  assert.equal(result.tokens?.[0]?.isNPlusOneTarget, true);
  assert.equal(result.tokens?.[1]?.isNPlusOneTarget, false);
});
test("tokenizeSubtitleService checks known words by headword, not surface", async () => {
const result = await tokenizeSubtitleService(
"猫です",

View File

@@ -1,5 +1,5 @@
import { BrowserWindow, Extension, session } from "electron";
import { mergeTokens } from "../../token-merger";
import { markNPlusOneTargets, mergeTokens } from "../../token-merger";
import {
MergedToken,
NPlusOneMatchMode,
@@ -93,6 +93,25 @@ function resolveKnownWordText(
return matchMode === "surface" ? surface : headword;
}
/**
 * Returns a copy of `tokens` with `isKnown` set for every token whose
 * match text (surface or headword, per `knownWordMatchMode`) passes the
 * `isKnownWord` predicate. Tokens already flagged known stay known, and
 * the predicate is only consulted for tokens not yet known.
 */
function applyKnownWordMarking(
  tokens: MergedToken[],
  isKnownWord: (text: string) => boolean,
  knownWordMatchMode: NPlusOneMatchMode,
): MergedToken[] {
  return tokens.map((token) => {
    const lookupText = resolveKnownWordText(
      token.surface,
      token.headword,
      knownWordMatchMode,
    );
    // Short-circuit: do not re-query the predicate for already-known tokens.
    const matchedNow =
      !token.isKnown && lookupText ? isKnownWord(lookupText) : false;
    return { ...token, isKnown: token.isKnown || matchedNow };
  });
}
function extractYomitanHeadword(segment: YomitanParseSegment): string {
const headwords = segment.headwords;
if (!Array.isArray(headwords) || headwords.length === 0) {
@@ -187,6 +206,7 @@ function mapYomitanParseResultsToMergedTokens(
endPos: end,
partOfSpeech: PartOfSpeech.other,
isMerged: true,
isNPlusOneTarget: false,
isKnown: (() => {
const matchText = resolveKnownWordText(
surface,
@@ -368,13 +388,23 @@ export async function tokenizeSubtitleService(
const yomitanTokens = await parseWithYomitanInternalParser(tokenizeText, deps);
if (yomitanTokens && yomitanTokens.length > 0) {
return { text: displayText, tokens: yomitanTokens };
const knownMarkedTokens = applyKnownWordMarking(
yomitanTokens,
deps.isKnownWord,
deps.getKnownWordMatchMode(),
);
return { text: displayText, tokens: markNPlusOneTargets(knownMarkedTokens) };
}
try {
const mecabTokens = await deps.tokenizeWithMecab(tokenizeText);
if (mecabTokens && mecabTokens.length > 0) {
return { text: displayText, tokens: mecabTokens };
const knownMarkedTokens = applyKnownWordMarking(
mecabTokens,
deps.isKnownWord,
deps.getKnownWordMatchMode(),
);
return { text: displayText, tokens: markNPlusOneTargets(knownMarkedTokens) };
}
} catch (err) {
console.error("Tokenization error:", (err as Error).message);

View File

@@ -1335,7 +1335,18 @@ registerIpcRuntimeServices({
getCurrentSubtitleAss: () => appState.currentSubAssText,
getMpvSubtitleRenderMetrics: () => appState.mpvSubtitleRenderMetrics,
getSubtitlePosition: () => loadSubtitlePosition(),
getSubtitleStyle: () => getResolvedConfig().subtitleStyle ?? null,
getSubtitleStyle: () => {
const resolvedConfig = getResolvedConfig();
if (!resolvedConfig.subtitleStyle) {
return null;
}
return {
...resolvedConfig.subtitleStyle,
nPlusOneColor: resolvedConfig.ankiConnect.nPlusOne.nPlusOne,
knownWordColor: resolvedConfig.ankiConnect.nPlusOne.knownWord,
};
},
saveSubtitlePosition: (position: unknown) =>
saveSubtitlePosition(position as SubtitlePosition),
getMecabTokenizer: () => appState.mecabTokenizer,

View File

@@ -69,6 +69,9 @@ export type RendererState = {
lastHoverSelectionKey: string;
lastHoverSelectionNode: Text | null;
knownWordColor: string;
nPlusOneColor: string;
keybindingsMap: Map<string, (string | number)[]>;
chordPending: boolean;
chordTimeout: ReturnType<typeof setTimeout> | null;
@@ -125,6 +128,9 @@ export function createRendererState(): RendererState {
lastHoverSelectionKey: "",
lastHoverSelectionNode: null,
knownWordColor: "#a6da95",
nPlusOneColor: "#c6a0f6",
keybindingsMap: new Map(),
chordPending: false,
chordTimeout: null,

View File

@@ -248,6 +248,8 @@ body {
font-size: 35px;
line-height: 1.5;
color: #cad3f5;
--subtitle-known-word-color: #a6da95;
--subtitle-n-plus-one-color: #c6a0f6;
text-shadow:
2px 2px 4px rgba(0, 0, 0, 0.8),
-1px -1px 2px rgba(0, 0, 0, 0.5);
@@ -285,10 +287,15 @@ body.settings-modal-open #subtitleContainer {
}
#subtitleRoot .word.word-known {
color: #a6da95;
color: var(--subtitle-known-word-color, #a6da95);
text-shadow: 0 0 6px rgba(166, 218, 149, 0.35);
}
#subtitleRoot .word.word-n-plus-one {
color: var(--subtitle-n-plus-one-color, #c6a0f6);
text-shadow: 0 0 6px rgba(198, 160, 246, 0.35);
}
#subtitleRoot .word:hover {
background: rgba(255, 255, 255, 0.2);
border-radius: 3px;

View File

@@ -23,13 +23,13 @@ function renderWithTokens(root: HTMLElement, tokens: MergedToken[]): void {
if (surface.includes("\n")) {
const parts = surface.split("\n");
for (let i = 0; i < parts.length; i += 1) {
if (parts[i]) {
const span = document.createElement("span");
span.className = token.isKnown ? "word word-known" : "word";
span.textContent = parts[i];
if (token.reading) span.dataset.reading = token.reading;
if (token.headword) span.dataset.headword = token.headword;
for (let i = 0; i < parts.length; i += 1) {
if (parts[i]) {
const span = document.createElement("span");
span.className = computeWordClass(token);
span.textContent = parts[i];
if (token.reading) span.dataset.reading = token.reading;
if (token.headword) span.dataset.headword = token.headword;
fragment.appendChild(span);
}
if (i < parts.length - 1) {
@@ -40,7 +40,7 @@ function renderWithTokens(root: HTMLElement, tokens: MergedToken[]): void {
}
const span = document.createElement("span");
span.className = token.isKnown ? "word word-known" : "word";
span.className = computeWordClass(token);
span.textContent = surface;
if (token.reading) span.dataset.reading = token.reading;
if (token.headword) span.dataset.headword = token.headword;
@@ -50,6 +50,18 @@ function renderWithTokens(root: HTMLElement, tokens: MergedToken[]): void {
root.appendChild(fragment);
}
/**
 * Maps a token's highlight state to its subtitle span class list.
 * N+1 target takes precedence over known-word styling.
 */
function computeWordClass(token: MergedToken): string {
  const modifier = token.isNPlusOneTarget
    ? " word-n-plus-one"
    : token.isKnown
      ? " word-known"
      : "";
  return `word${modifier}`;
}
function renderCharacterLevel(root: HTMLElement, text: string): void {
const fragment = document.createDocumentFragment();
@@ -173,6 +185,19 @@ export function createSubtitleRenderer(ctx: RendererContext) {
ctx.dom.subtitleContainer.style.background = style.backgroundColor;
}
const knownWordColor =
style.knownWordColor ?? ctx.state.knownWordColor ?? "#a6da95";
const nPlusOneColor =
style.nPlusOneColor ?? ctx.state.nPlusOneColor ?? "#c6a0f6";
ctx.state.knownWordColor = knownWordColor;
ctx.state.nPlusOneColor = nPlusOneColor;
ctx.dom.subtitleRoot.style.setProperty(
"--subtitle-known-word-color",
knownWordColor,
);
ctx.dom.subtitleRoot.style.setProperty("--subtitle-n-plus-one-color", nPlusOneColor);
const secondaryStyle = style.secondary;
if (!secondaryStyle) return;

View File

@@ -216,41 +216,132 @@ export function mergeTokens(
}
return mergedHeadword;
})();
result.push({
surface: prev.surface + token.word,
reading: prev.reading + tokenReading,
headword: prev.headword,
startPos: prev.startPos,
endPos: end,
partOfSpeech: prev.partOfSpeech,
isMerged: true,
isKnown: headwordForKnownMatch
? isKnownWord(headwordForKnownMatch)
: false,
});
} else {
const headwordForKnownMatch = (() => {
if (knownWordMatchMode === "surface") {
return token.word;
}
return token.headword;
})();
result.push({
surface: token.word,
reading: tokenReading,
headword: token.headword,
startPos: start,
endPos: end,
partOfSpeech: token.partOfSpeech,
isMerged: false,
isKnown: headwordForKnownMatch
? isKnownWord(headwordForKnownMatch)
: false,
});
}
result.push({
surface: prev.surface + token.word,
reading: prev.reading + tokenReading,
headword: prev.headword,
startPos: prev.startPos,
endPos: end,
partOfSpeech: prev.partOfSpeech,
pos2: prev.pos2 ?? token.pos2,
pos3: prev.pos3 ?? token.pos3,
isMerged: true,
isKnown: headwordForKnownMatch
? isKnownWord(headwordForKnownMatch)
: false,
isNPlusOneTarget: false,
});
} else {
const headwordForKnownMatch = (() => {
if (knownWordMatchMode === "surface") {
return token.word;
}
return token.headword;
})();
result.push({
surface: token.word,
reading: tokenReading,
headword: token.headword,
startPos: start,
endPos: end,
partOfSpeech: token.partOfSpeech,
pos2: token.pos2,
pos3: token.pos3,
isMerged: false,
isKnown: headwordForKnownMatch
? isKnownWord(headwordForKnownMatch)
: false,
isNPlusOneTarget: false,
});
}
lastStandaloneToken = token;
}
return result;
}
const SENTENCE_BOUNDARY_SURFACES = new Set(["。", "", "", "?", "!", "…", "\u2026"]);
/**
 * Decides whether a token can count as the single "unknown" word of an
 * N+1 sentence. Known words, grammatical glue, punctuation, proper nouns,
 * counter words, and whitespace-only surfaces are all excluded.
 */
export function isNPlusOneCandidateToken(token: MergedToken): boolean {
  // Already-known words cannot be the one unknown target.
  if (token.isKnown) {
    return false;
  }
  // Particles, bound auxiliaries, and symbols are grammatical glue /
  // punctuation, never vocabulary targets.
  switch (token.partOfSpeech) {
    case PartOfSpeech.particle:
    case PartOfSpeech.bound_auxiliary:
    case PartOfSpeech.symbol:
      return false;
    default:
      break;
  }
  // Proper nouns (固有名詞) are names, not learnable vocabulary.
  if (token.partOfSpeech === PartOfSpeech.noun && token.pos2 === "固有名詞") {
    return false;
  }
  // Counter words (pos3 starting with 助数詞) are excluded.
  if (token.pos3?.startsWith("助数詞")) {
    return false;
  }
  // A whitespace-only surface carries no word at all.
  return token.surface.trim().length > 0;
}
/**
 * True when the token is a symbol whose surface is one of the
 * sentence-terminating punctuation marks.
 */
function isSentenceBoundaryToken(token: MergedToken): boolean {
  return (
    token.partOfSpeech === PartOfSpeech.symbol &&
    SENTENCE_BOUNDARY_SURFACES.has(token.surface)
  );
}
/**
 * Returns a copy of `tokens` where, in each sentence (split on boundary
 * punctuation), the token is flagged `isNPlusOneTarget` iff it is the
 * sentence's only N+1 candidate. All other flags are cleared, so
 * re-marking is idempotent. The input array is not mutated.
 */
export function markNPlusOneTargets(tokens: MergedToken[]): MergedToken[] {
  if (tokens.length === 0) {
    return [];
  }
  // Copies with the flag cleared; marking below only ever sets it.
  const result = tokens.map((token) => ({ ...token, isNPlusOneTarget: false }));

  // Flags the unique candidate in result[begin, end), if exactly one exists.
  const flagUniqueCandidate = (begin: number, end: number): void => {
    let candidateIndex = -1;
    let candidateCount = 0;
    for (let idx = begin; idx < end; idx++) {
      if (isNPlusOneCandidateToken(result[idx])) {
        candidateIndex = idx;
        candidateCount += 1;
        if (candidateCount > 1) {
          return; // more than one unknown → not an N+1 sentence
        }
      }
    }
    if (candidateCount === 1) {
      result[candidateIndex] = {
        ...result[candidateIndex],
        isNPlusOneTarget: true,
      };
    }
  };

  let sentenceBegin = 0;
  for (let idx = 0; idx < result.length; idx++) {
    if (isSentenceBoundaryToken(result[idx])) {
      flagUniqueCandidate(sentenceBegin, idx);
      sentenceBegin = idx + 1;
    }
  }
  // Trailing sentence with no terminating punctuation (no-op when empty).
  flagUniqueCandidate(sentenceBegin, result.length);
  return result;
}

View File

@@ -48,8 +48,11 @@ export interface MergedToken {
startPos: number;
endPos: number;
partOfSpeech: PartOfSpeech;
pos2?: string;
pos3?: string;
isMerged: boolean;
isKnown: boolean;
isNPlusOneTarget: boolean;
}
export interface WindowGeometry {
@@ -230,6 +233,8 @@ export interface AnkiConnectConfig {
refreshMinutes?: number;
matchMode?: NPlusOneMatchMode;
decks?: string[];
nPlusOne?: string;
knownWord?: string;
};
behavior?: {
overwriteAudio?: boolean;
@@ -263,6 +268,8 @@ export interface SubtitleStyleConfig {
fontWeight?: string;
fontStyle?: string;
backgroundColor?: string;
nPlusOneColor?: string;
knownWordColor?: string;
secondary?: {
fontFamily?: string;
fontSize?: number;
@@ -378,6 +385,8 @@ export interface ResolvedConfig {
refreshMinutes: number;
matchMode: NPlusOneMatchMode;
decks: string[];
nPlusOne: string;
knownWord: string;
};
behavior: {
overwriteAudio: boolean;