Complete TASK-12 bundling and tokenizer test type fixes

This commit is contained in:
2026-02-15 18:16:46 -08:00
parent 47aeabbc58
commit 1ca9cbc20d
3 changed files with 108 additions and 1 deletion

View File

@@ -4,7 +4,8 @@
"description": "All-in-one sentence mining overlay with AnkiConnect and dictionary integration",
"main": "dist/main.js",
"scripts": {
"build": "tsc && tsc -p tsconfig.renderer.json && cp src/renderer/index.html src/renderer/style.css dist/renderer/ && bash scripts/build-macos-helper.sh",
"build": "tsc && pnpm run build:renderer && cp src/renderer/index.html src/renderer/style.css dist/renderer/ && bash scripts/build-macos-helper.sh",
"build:renderer": "esbuild src/renderer/renderer.ts --bundle --platform=browser --format=esm --target=es2022 --outfile=dist/renderer/renderer.js --sourcemap",
"check:main-lines": "bash scripts/check-main-lines.sh",
"check:main-lines:baseline": "bash scripts/check-main-lines.sh 5300",
"check:main-lines:gate1": "bash scripts/check-main-lines.sh 4500",
@@ -55,6 +56,7 @@
"@types/node": "^25.2.2",
"@types/ws": "^8.18.1",
"electron": "^37.10.3",
"esbuild": "^0.21.5",
"electron-builder": "^26.7.0",
"typescript": "^5.9.3",
"vitepress": "^1.6.3"

3
pnpm-lock.yaml generated
View File

@@ -36,6 +36,9 @@ importers:
electron-builder:
specifier: ^26.7.0
version: 26.7.0(electron-builder-squirrel-windows@26.7.0)
esbuild:
specifier: ^0.21.5
version: 0.21.5
typescript:
specifier: ^5.9.3
version: 5.9.3

View File

@@ -88,6 +88,108 @@ test("tokenizeSubtitleService assigns JLPT level to parsed Yomitan tokens", asyn
assert.equal(result.tokens?.[0]?.jlptLevel, "N5");
});
test("tokenizeSubtitleService caches JLPT lookups across repeated tokens", async () => {
  let lookupCalls = 0;
  // Factory for a MeCab noun token with surface "猫"; two structurally
  // identical tokens exercise the per-surface JLPT cache.
  const catToken = () => ({
    word: "猫",
    partOfSpeech: PartOfSpeech.noun,
    pos1: "",
    pos2: "",
    pos3: "",
    pos4: "",
    inflectionType: "",
    inflectionForm: "",
    headword: "猫",
    katakanaReading: "ネコ",
    pronunciation: "ネコ",
  });
  const deps = makeDepsFromMecabTokenizer(async () => [catToken(), catToken()], {
    getJlptLevel: (text) => {
      lookupCalls += 1;
      return text === "猫" ? "N5" : null;
    },
  });
  const result = await tokenizeSubtitleService("猫猫", deps);
  // Both tokens carry the level, yet the lookup ran only once (cache hit).
  assert.equal(result.tokens?.length, 2);
  assert.equal(lookupCalls, 1);
  assert.equal(result.tokens?.[0]?.jlptLevel, "N5");
  assert.equal(result.tokens?.[1]?.jlptLevel, "N5");
});
test("tokenizeSubtitleService leaves JLPT unset for non-matching tokens", async () => {
  // A lookup that never matches must leave jlptLevel absent on the token.
  const mecabOutput = [
    {
      word: "猫",
      partOfSpeech: PartOfSpeech.noun,
      pos1: "",
      pos2: "",
      pos3: "",
      pos4: "",
      inflectionType: "",
      inflectionForm: "",
      headword: "猫",
      katakanaReading: "ネコ",
      pronunciation: "ネコ",
    },
  ];
  const deps = makeDepsFromMecabTokenizer(async () => mecabOutput, {
    getJlptLevel: () => null,
  });
  const result = await tokenizeSubtitleService("猫", deps);
  assert.equal(result.tokens?.length, 1);
  assert.equal(result.tokens?.[0]?.jlptLevel, undefined);
});
test("tokenizeSubtitleService skips JLPT lookups when disabled", async () => {
  let lookupCalls = 0;
  // Pre-tokenized output: a single known-shape token for "猫".
  const tokenized = [
    {
      headword: "猫",
      surface: "猫",
      reading: "ネコ",
      startPos: 0,
      endPos: 1,
      partOfSpeech: PartOfSpeech.noun,
      isMerged: false,
      isKnown: false,
      isNPlusOneTarget: false,
    },
  ];
  const deps = makeDeps({
    tokenizeWithMecab: async () => tokenized,
    getJlptLevel: () => {
      lookupCalls += 1;
      return "N5";
    },
    getJlptEnabled: () => false,
  });
  const result = await tokenizeSubtitleService("猫です", deps);
  // With the JLPT feature disabled, no level is assigned and the
  // lookup function must never be invoked.
  assert.equal(result.tokens?.length, 1);
  assert.equal(result.tokens?.[0]?.jlptLevel, undefined);
  assert.equal(lookupCalls, 0);
});
test("tokenizeSubtitleService skips JLPT level for excluded demonstratives", async () => {
const result = await tokenizeSubtitleService(
"この",