refactor: remove root node and npm workflow deps

This commit is contained in:
2026-03-07 21:19:14 -08:00
parent f0418c6e56
commit d0c11d347b
32 changed files with 215 additions and 299 deletions

View File

@@ -24,18 +24,13 @@
SubMiner is an Electron overlay that sits on top of mpv. It turns your video player into a full sentence-mining workstation:
- **Hover to look up** — Yomitan dictionary popups directly on subtitles
- **Keyboard-driven lookup mode** — Navigate token-by-token, keep lookup open across tokens, and control popup scrolling/audio/mining without leaving the overlay
- **One-key mining** — Creates Anki cards with sentence, audio, screenshot, and translation
- **Instant auto-enrichment** — Optional local AnkiConnect proxy enriches new Yomitan cards immediately
- **Reading annotations** — Combines N+1 targeting, frequency-dictionary highlighting, and JLPT underlining while you read
- **Hover-aware playback** — By default, hovering subtitle text pauses mpv and resumes on mouse leave (`subtitleStyle.autoPauseVideoOnHover`)
- **Subtitle tools** — Download from Jimaku, sync with alass/ffsubsync
- **Immersion tracking** — SQLite-powered stats on your watch time and mining activity
- **Custom texthooker page** — Built-in custom texthooker page and websocket, no extra setup
- **Annotated websocket API** — Dedicated annotation feed can serve bundled texthooker or external clients with rendered `sentence` HTML plus structured `tokens`
- **Jellyfin integration** — Remote playback setup, cast device mode, and direct playback launch
- **AniList progress** — Track episode completion and push watching progress automatically
- **Dictionary lookups** — Yomitan popups on subtitles with hover or full keyboard-driven navigation; hover-aware auto-pause keeps playback in sync
- **One-key mining** — Creates Anki cards with sentence, audio, screenshot, and AI-powered translation
- **Reading annotations** — N+1 targeting, frequency highlighting, and JLPT underlining while you watch
- **Subtitle tools** — Jimaku downloads, alass/ffsubsync sync, and whisper.cpp transcription for YouTube with optional AI cleanup
- **Texthooker** — Built-in texthooker page and annotated websocket API for external clients
- **Immersion tracking** — SQLite-powered stats on watch time and mining activity
- **Integrations** — Jellyfin remote playback, AniList episode progress, and AnkiConnect auto-enrichment
## Quick start
@@ -54,7 +49,7 @@ chmod +x ~/.local/bin/subminer
> [!NOTE]
> The `subminer` wrapper uses a [Bun](https://bun.sh) shebang. Make sure `bun` is on your `PATH`.
**From source** or **macOS** — initialize submodules first (`git submodule update --init --recursive`). Source builds now also require Node.js 22 + npm because bundled Yomitan is built from the `vendor/subminer-yomitan` submodule into `build/yomitan` during `bun run build`. Full install guide: [docs.subminer.moe/installation#from-source](https://docs.subminer.moe/installation#from-source).
**From source** or **macOS** — initialize submodules first (`git submodule update --init --recursive`). Bundled Yomitan is built natively with Bun from the `vendor/subminer-yomitan` submodule into `build/yomitan` during `bun run build`, so Bun is the only JS runtime/package manager required for source builds. Full install guide: [docs.subminer.moe/installation#from-source](https://docs.subminer.moe/installation#from-source).
### 2. Launch the app once
@@ -92,7 +87,7 @@ subminer --start video.mkv # optional explicit overlay start when plugin auto_st
| Required | Optional |
| ------------------------------------------ | -------------------------------------------------- |
| `bun`, `node` 22, `npm` | |
| `bun` | |
| `mpv` with IPC socket | `yt-dlp` |
| `ffmpeg` | `guessit` (better AniSkip title/episode detection) |
| `mecab` + `mecab-ipadic` | `fzf` / `rofi` |
@@ -101,18 +96,7 @@ subminer --start video.mkv # optional explicit overlay start when plugin auto_st
## Documentation
For full guides on configuration, Anki, Jellyfin, and more, see [docs.subminer.moe](https://docs.subminer.moe).
## Testing
- Run `bun run test` or `bun run test:fast` for the default fast lane: config/core coverage plus representative entry/runtime, Anki integration, and main runtime checks.
- Run `bun run test:full` for the maintained test surface: Bun-compatible `src/**` coverage, Bun-compatible launcher unit coverage, and a Node compatibility lane for suites that depend on Electron named exports or `node:sqlite` behavior.
- Run `bun run test:node:compat` directly when you only need the Node-backed compatibility slice: `ipc`, `anki-jimaku-ipc`, `overlay-manager`, `config-validation`, `startup-config`, and runtime registry coverage.
- Run `bun run test:env` for environment-specific verification: launcher smoke/plugin checks plus the SQLite-backed immersion tracker lane.
- Run `bun run test:immersion:sqlite` when you specifically need real SQLite persistence coverage under Node with `--experimental-sqlite`.
- Run `bun run test:subtitle` for the maintained `alass`/`ffsubsync` subtitle surface.
The Bun-managed discovery lanes intentionally exclude a small set of suites that are currently Node-only because of Bun runtime/tooling gaps rather than product behavior: Electron named-export tests in `src/core/services/ipc.test.ts`, `src/core/services/anki-jimaku-ipc.test.ts`, and `src/core/services/overlay-manager.test.ts`, plus runtime/config tests in `src/main/config-validation.test.ts`, `src/main/runtime/startup-config.test.ts`, and `src/main/runtime/registry.test.ts`. `bun run test:node:compat` keeps those suites in the standard workflow instead of leaving them untracked.
For full guides on configuration, Anki, Jellyfin, and more, see [docs.subminer.moe](https://docs.subminer.moe). Contributor setup, build, and testing docs now live in the docs repo: [docs.subminer.moe/development#testing](https://docs.subminer.moe/development#testing).
## Acknowledgments

View File

@@ -0,0 +1,35 @@
---
id: TASK-120
title: 'Replace node:sqlite with libsql and remove Yomitan Node wrapper'
status: Done
assignee: []
created_date: '2026-03-08 04:14'
updated_date: '2026-03-08 04:39'
labels:
- runtime
- bun
- sqlite
- tech-debt
dependencies: []
priority: medium
---
## Description
<!-- SECTION:DESCRIPTION:BEGIN -->
Remove the remaining root Node requirement — caused by the immersion tracker's SQLite usage and the old Yomitan build wrapper — by migrating the local SQLite layer off node:sqlite, running the SQLite-backed verification lanes under Bun, and switching the vendored Yomitan build flow to Bun-native scripts.
<!-- SECTION:DESCRIPTION:END -->
## Acceptance Criteria
<!-- AC:BEGIN -->
- [x] #1 Immersion tracker runtime no longer imports or requires node:sqlite
- [x] #2 SQLite-backed immersion tracker tests run under Bun without Node --experimental-sqlite
- [x] #3 Root build/test scripts no longer require the Yomitan Node wrapper or Node-based SQLite verification lanes
- [x] #4 README requirements/testing docs reflect the Bun-native workflow
<!-- AC:END -->
## Final Summary
<!-- SECTION:FINAL_SUMMARY:BEGIN -->
Replaced the immersion tracker SQLite dependency with a local libsql-backed wrapper, updated Bun/runtime compatibility tests to avoid process.exitCode side effects, switched Yomitan builds to run directly inside the vendored Bun-native project, deleted scripts/build-yomitan.mjs, and verified typecheck plus Bun build/test lanes (`build:yomitan`, `test:immersion:sqlite`, `test:runtime:compat`, `test:fast`).
<!-- SECTION:FINAL_SUMMARY:END -->

View File

@@ -9,6 +9,7 @@
"commander": "^14.0.3",
"discord-rpc": "^4.0.1",
"jsonc-parser": "^3.3.1",
"libsql": "^0.5.22",
"ws": "^8.19.0",
},
"devDependencies": {
@@ -99,10 +100,30 @@
"@isaacs/fs-minipass": ["@isaacs/fs-minipass@4.0.1", "", { "dependencies": { "minipass": "7.1.2" } }, "sha512-wgm9Ehl2jpeqP3zw/7mo3kRHFp5MEDhqAdwy1fTGkHAwnkGOVsgpvQhL8B5n1qlb01jV3n/bI0ZfZp5lWA1k4w=="],
"@libsql/darwin-arm64": ["@libsql/darwin-arm64@0.5.22", "", { "os": "darwin", "cpu": "arm64" }, "sha512-4B8ZlX3nIDPndfct7GNe0nI3Yw6ibocEicWdC4fvQbSs/jdq/RC2oCsoJxJ4NzXkvktX70C1J4FcmmoBy069UA=="],
"@libsql/darwin-x64": ["@libsql/darwin-x64@0.5.22", "", { "os": "darwin", "cpu": "x64" }, "sha512-ny2HYWt6lFSIdNFzUFIJ04uiW6finXfMNJ7wypkAD8Pqdm6nAByO+Fdqu8t7sD0sqJGeUCiOg480icjyQ2/8VA=="],
"@libsql/linux-arm-gnueabihf": ["@libsql/linux-arm-gnueabihf@0.5.22", "", { "os": "linux", "cpu": "arm" }, "sha512-3Uo3SoDPJe/zBnyZKosziRGtszXaEtv57raWrZIahtQDsjxBVjuzYQinCm9LRCJCUT5t2r5Z5nLDPJi2CwZVoA=="],
"@libsql/linux-arm-musleabihf": ["@libsql/linux-arm-musleabihf@0.5.22", "", { "os": "linux", "cpu": "arm" }, "sha512-LCsXh07jvSojTNJptT9CowOzwITznD+YFGGW+1XxUr7fS+7/ydUrpDfsMX7UqTqjm7xG17eq86VkWJgHJfvpNg=="],
"@libsql/linux-arm64-gnu": ["@libsql/linux-arm64-gnu@0.5.22", "", { "os": "linux", "cpu": "arm64" }, "sha512-KSdnOMy88c9mpOFKUEzPskSaF3VLflfSUCBwas/pn1/sV3pEhtMF6H8VUCd2rsedwoukeeCSEONqX7LLnQwRMA=="],
"@libsql/linux-arm64-musl": ["@libsql/linux-arm64-musl@0.5.22", "", { "os": "linux", "cpu": "arm64" }, "sha512-mCHSMAsDTLK5YH//lcV3eFEgiR23Ym0U9oEvgZA0667gqRZg/2px+7LshDvErEKv2XZ8ixzw3p1IrBzLQHGSsw=="],
"@libsql/linux-x64-gnu": ["@libsql/linux-x64-gnu@0.5.22", "", { "os": "linux", "cpu": "x64" }, "sha512-kNBHaIkSg78Y4BqAdgjcR2mBilZXs4HYkAmi58J+4GRwDQZh5fIUWbnQvB9f95DkWUIGVeenqLRFY2pcTmlsew=="],
"@libsql/linux-x64-musl": ["@libsql/linux-x64-musl@0.5.22", "", { "os": "linux", "cpu": "x64" }, "sha512-UZ4Xdxm4pu3pQXjvfJiyCzZop/9j/eA2JjmhMaAhe3EVLH2g11Fy4fwyUp9sT1QJYR1kpc2JLuybPM0kuXv/Tg=="],
"@libsql/win32-x64-msvc": ["@libsql/win32-x64-msvc@0.5.22", "", { "os": "win32", "cpu": "x64" }, "sha512-Fj0j8RnBpo43tVZUVoNK6BV/9AtDUM5S7DF3LB4qTYg1LMSZqi3yeCneUTLJD6XomQJlZzbI4mst89yspVSAnA=="],
"@malept/cross-spawn-promise": ["@malept/cross-spawn-promise@2.0.0", "", { "dependencies": { "cross-spawn": "7.0.6" } }, "sha512-1DpKU0Z5ThltBwjNySMC14g0CkbyhCaz9FkhxqNsZI6uAPJXFS8cMXlBKo26FJ8ZuW6S9GCMcR9IO5k2X5/9Fg=="],
"@malept/flatpak-bundler": ["@malept/flatpak-bundler@0.4.0", "", { "dependencies": { "debug": "4.4.3", "fs-extra": "9.1.0", "lodash": "4.17.23", "tmp-promise": "3.0.3" } }, "sha512-9QOtNffcOF/c1seMCDnjckb3R9WHcG34tky+FHpNKKCW0wc/scYLwMtO+ptyGUfMW0/b/n4qRiALlaFHc9Oj7Q=="],
"@neon-rs/load": ["@neon-rs/load@0.0.4", "", {}, "sha512-kTPhdZyTQxB+2wpiRcFWrDcejc4JI6tkPuS7UZCG4l6Zvc5kU/gGQ/ozvHTh1XR5tS+UlfAfGuPajjzQjCiHCw=="],
"@npmcli/agent": ["@npmcli/agent@3.0.0", "", { "dependencies": { "agent-base": "7.1.4", "http-proxy-agent": "7.0.2", "https-proxy-agent": "7.0.6", "lru-cache": "10.4.3", "socks-proxy-agent": "8.0.5" } }, "sha512-S79NdEgDQd/NGCay6TCoVzXSj74skRZIKJcpJjC5lOq34SZzyI6MqtiiWoiVWoVrTcGjNeC4ipbh1VIHlpfF5Q=="],
"@npmcli/fs": ["@npmcli/fs@4.0.0", "", { "dependencies": { "semver": "7.7.4" } }, "sha512-/xGlezI6xfGO9NwuJlnwz/K14qD1kCSAGtacBHnGzeAIuJGazcp45KP5NuyARXoKb7cwulAGWVsbeSxdG/cb0Q=="],
@@ -255,7 +276,7 @@
"delayed-stream": ["delayed-stream@1.0.0", "", {}, "sha512-ZySD7Nf91aLB0RxL4KGrKHBXl7Eds1DAmEdcoVawXnLD7SDhpNgtuII2aAkg7a7QS41jxPSZ17p4VdGnMHk3MQ=="],
"detect-libc": ["detect-libc@2.1.2", "", {}, "sha512-Btj2BOOO83o3WyH59e8MgXsxEQVcarkUOpEYrubB0urwnN10yQ364rsiByU11nZlqWYZm05i/of7io4mzihBtQ=="],
"detect-libc": ["detect-libc@2.0.2", "", {}, "sha512-UX6sGumvvqSaXgdKGUsgZWqcUyIXZ/vZTrlRT/iobiKhGL0zL4d3osHj3uqllWJK+i+sixDS/3COVEOFbupFyw=="],
"detect-node": ["detect-node@2.1.0", "", {}, "sha512-T0NIuQpnTvFDATNuHN5roPwSBG83rFsuO+MXXH9/3N1eFbn4wcPjttvjMLEPWJ0RGUYgQE7cGgS3tNxbqCGM7g=="],
@@ -433,6 +454,8 @@
"lazy-val": ["lazy-val@1.0.5", "", {}, "sha512-0/BnGCCfyUMkBpeDgWihanIAF9JmZhHBgUhEqzvf+adhNGLoP6TaiI5oF8oyb3I45P+PcnrqihSf01M0l0G5+Q=="],
"libsql": ["libsql@0.5.22", "", { "dependencies": { "@neon-rs/load": "^0.0.4", "detect-libc": "2.0.2" }, "optionalDependencies": { "@libsql/darwin-arm64": "0.5.22", "@libsql/darwin-x64": "0.5.22", "@libsql/linux-arm-gnueabihf": "0.5.22", "@libsql/linux-arm-musleabihf": "0.5.22", "@libsql/linux-arm64-gnu": "0.5.22", "@libsql/linux-arm64-musl": "0.5.22", "@libsql/linux-x64-gnu": "0.5.22", "@libsql/linux-x64-musl": "0.5.22", "@libsql/win32-x64-msvc": "0.5.22" }, "os": [ "linux", "win32", "darwin", ], "cpu": [ "arm", "x64", "arm64", ] }, "sha512-NscWthMQt7fpU8lqd7LXMvT9pi+KhhmTHAJWUB/Lj6MWa0MKFv0F2V4C6WKKpjCVZl0VwcDz4nOI3CyaT1DDiA=="],
"lodash": ["lodash@4.17.23", "", {}, "sha512-LgVTMpQtIopCi79SJeDiP0TfWi5CNEc/L/aRdTh3yIvmZXTnheWpKjSZhnvMl8iXbC1tFg9gdHHDMLoV7CnG+w=="],
"log-symbols": ["log-symbols@4.1.0", "", { "dependencies": { "chalk": "4.1.2", "is-unicode-supported": "0.1.0" } }, "sha512-8XPvpAA8uyhfteu8pIvQxpJZ7SYYdpUivZpGy6sFsBuKRY/7rQGavedeB8aK+Zkyq6upMFVL/9AW6vOYzfRyLg=="],
@@ -703,6 +726,8 @@
"@electron/osx-sign/isbinaryfile": ["isbinaryfile@4.0.10", "", {}, "sha512-iHrqe5shvBUcFbmZq9zOQHBoeOhZJu6RQGrDpBgenUm/Am+F3JM2MgQj+rK3Z601fzrL5gLZWtAPH2OBaSVcyw=="],
"@electron/rebuild/detect-libc": ["detect-libc@2.1.2", "", {}, "sha512-Btj2BOOO83o3WyH59e8MgXsxEQVcarkUOpEYrubB0urwnN10yQ364rsiByU11nZlqWYZm05i/of7io4mzihBtQ=="],
"@electron/rebuild/semver": ["semver@7.7.4", "", { "bin": { "semver": "bin/semver.js" } }, "sha512-vFKC2IEtQnVhpT78h1Yp8wzwrf8CM+MzKMHGJZfBtzhZNycRFnXsHk6E5TxIkkMsgNS7mdX3AGB7x2QM2di4lA=="],
"@electron/universal/fs-extra": ["fs-extra@11.3.3", "", { "dependencies": { "graceful-fs": "4.2.11", "jsonfile": "6.2.0", "universalify": "2.0.1" } }, "sha512-VWSRii4t0AFm6ixFFmLLx1t7wS1gh+ckoa84aOeapGum0h+EZd1EhEumSB+ZdDLnEPuucsVB9oB7cxJHap6Afg=="],

View File

@@ -11,7 +11,7 @@
"get-frequency:electron": "bun run build:yomitan && bun build scripts/get_frequency.ts --format=cjs --target=node --outfile dist/scripts/get_frequency.js --external electron && electron dist/scripts/get_frequency.js --pretty --color-top-x 10000 --yomitan-user-data ~/.config/SubMiner --colorized-line",
"test-yomitan-parser": "bun run scripts/test-yomitan-parser.ts",
"test-yomitan-parser:electron": "bun run build:yomitan && bun build scripts/test-yomitan-parser.ts --format=cjs --target=node --outfile dist/scripts/test-yomitan-parser.js --external electron && electron dist/scripts/test-yomitan-parser.js",
"build:yomitan": "node scripts/build-yomitan.mjs",
"build:yomitan": "cd vendor/subminer-yomitan && bun install --frozen-lockfile && bun run build -- --target chrome && rm -rf ../../build/yomitan && mkdir -p ../../build/yomitan && unzip -qo builds/yomitan-chrome.zip -d ../../build/yomitan",
"build": "bun run build:yomitan && tsc -p tsconfig.json && bun run build:renderer && cp src/renderer/index.html src/renderer/style.css dist/renderer/ && cp -r src/renderer/fonts dist/renderer/ && bash scripts/build-macos-helper.sh",
"build:renderer": "esbuild src/renderer/renderer.ts --bundle --platform=browser --format=esm --target=es2022 --outfile=dist/renderer/renderer.js --sourcemap",
"format": "prettier --write .",
@@ -30,20 +30,21 @@
"test:smoke:dist": "bun run test:config:smoke:dist && bun run test:core:smoke:dist",
"test:subtitle:src": "bun test src/core/services/subsync.test.ts src/subsync/utils.test.ts",
"test:immersion:sqlite:src": "bun test src/core/services/immersion-tracker-service.test.ts src/core/services/immersion-tracker/storage-session.test.ts",
"test:immersion:sqlite:dist": "node --experimental-sqlite --test dist/core/services/immersion-tracker-service.test.js dist/core/services/immersion-tracker/storage-session.test.js",
"test:immersion:sqlite": "bun run tsc && bun run test:immersion:sqlite:dist",
"test:src": "node scripts/run-test-lane.mjs bun-src-full",
"test:launcher:unit:src": "node scripts/run-test-lane.mjs bun-launcher-unit",
"test:immersion:sqlite:dist": "bun test dist/core/services/immersion-tracker-service.test.js dist/core/services/immersion-tracker/storage-session.test.js",
"test:immersion:sqlite": "tsc -p tsconfig.json && bun run test:immersion:sqlite:dist",
"test:src": "bun scripts/run-test-lane.mjs bun-src-full",
"test:launcher:unit:src": "bun scripts/run-test-lane.mjs bun-launcher-unit",
"test:launcher:env:src": "bun run test:launcher:smoke:src && bun run test:plugin:src",
"test:env": "bun run test:launcher:env:src && bun run test:immersion:sqlite:src",
"test:node:compat": "bun run tsc && node --experimental-sqlite --test dist/core/services/ipc.test.js dist/core/services/anki-jimaku-ipc.test.js dist/core/services/overlay-manager.test.js dist/main/config-validation.test.js dist/main/runtime/registry.test.js dist/main/runtime/startup-config.test.js",
"test:full": "bun run test:src && bun run test:launcher:unit:src && bun run test:node:compat",
"test:runtime:compat": "tsc -p tsconfig.json && bun test dist/core/services/ipc.test.js dist/core/services/anki-jimaku-ipc.test.js dist/core/services/overlay-manager.test.js dist/main/config-validation.test.js dist/main/runtime/registry.test.js dist/main/runtime/startup-config.test.js",
"test:node:compat": "bun run test:runtime:compat",
"test:full": "bun run test:src && bun run test:launcher:unit:src && bun run test:runtime:compat",
"test": "bun run test:fast",
"test:config": "bun run test:config:src",
"test:launcher": "bun run test:launcher:src",
"test:core": "bun run test:core:src",
"test:subtitle": "bun run test:subtitle:src",
"test:fast": "bun run test:config:src && bun run test:core:src && bun test src/main-entry-runtime.test.ts src/anki-integration/anki-connect-proxy.test.ts src/release-workflow.test.ts && bun run tsc && node --experimental-sqlite --test dist/main/runtime/registry.test.js",
"test:fast": "bun run test:config:src && bun run test:core:src && bun test src/main-entry-runtime.test.ts src/anki-integration/anki-connect-proxy.test.ts src/release-workflow.test.ts && tsc -p tsconfig.json && bun test dist/main/runtime/registry.test.js",
"generate:config-example": "bun run build && bun dist/generate-config-example.js",
"start": "bun run build && electron . --start",
"dev": "bun run build && electron . --start --dev",
@@ -71,6 +72,7 @@
"commander": "^14.0.3",
"discord-rpc": "^4.0.1",
"jsonc-parser": "^3.3.1",
"libsql": "^0.5.22",
"ws": "^8.19.0"
},
"devDependencies": {

View File

@@ -1,144 +0,0 @@
import fs from 'node:fs';
import os from 'node:os';
import path from 'node:path';
import { createHash } from 'node:crypto';
import { execFileSync } from 'node:child_process';
import { fileURLToPath } from 'node:url';
// Resolve repo-relative paths from this script's location (scripts/ -> repo root).
const dirname = path.dirname(fileURLToPath(import.meta.url));
const repoRoot = path.resolve(dirname, '..');
// Vendored Yomitan submodule plus its npm manifest and lockfile.
const submoduleDir = path.join(repoRoot, 'vendor', 'subminer-yomitan');
const submodulePackagePath = path.join(submoduleDir, 'package.json');
const submodulePackageLockPath = path.join(submoduleDir, 'package-lock.json');
// Extraction target and a stamp file recording which submodule state was built.
const buildOutputDir = path.join(repoRoot, 'build', 'yomitan');
const stampPath = path.join(buildOutputDir, '.subminer-build.json');
// Zip artifact emitted by the submodule's own build step.
const zipPath = path.join(submoduleDir, 'builds', 'yomitan-chrome.zip');
// npm needs its .cmd shim on Windows when spawned via execFileSync.
const npmCommand = process.platform === 'win32' ? 'npm.cmd' : 'npm';
// Hash of package-lock.json written after install so unchanged deps skip `npm ci`.
const dependencyStampPath = path.join(submoduleDir, 'node_modules', '.subminer-package-lock-hash');
// Spawn a command synchronously in `cwd`, streaming output to this process;
// throws on non-zero exit (execFileSync behavior).
function run(command, args, cwd) {
  execFileSync(command, args, { cwd, stdio: 'inherit' });
}
// Run a command in `cwd` and return its trimmed stdout; throws on non-zero exit.
function readCommand(command, args, cwd) {
  return execFileSync(command, args, { cwd, encoding: 'utf8' }).trim();
}
// Load the previous build stamp from disk, or null when the stamp file is
// missing or contains invalid JSON.
function readStamp() {
  let parsed = null;
  try {
    const raw = fs.readFileSync(stampPath, 'utf8');
    parsed = JSON.parse(raw);
  } catch {
    parsed = null;
  }
  return parsed;
}
// Return the hex-encoded SHA-256 digest of the file at `filePath`.
function hashFile(filePath) {
  const contents = fs.readFileSync(filePath);
  return createHash('sha256').update(contents).digest('hex');
}
// Fail fast with an actionable message when the Yomitan submodule has not been
// checked out (its package.json is the presence marker).
function ensureSubmodulePresent() {
  if (!fs.existsSync(submodulePackagePath)) {
    throw new Error(
      'Missing vendor/subminer-yomitan submodule. Run `git submodule update --init --recursive`.',
    );
  }
}
// Capture the submodule's current git state: HEAD revision plus a short status
// listing of modified tracked files (untracked files are ignored).
function getSourceState() {
  return {
    revision: readCommand('git', ['rev-parse', 'HEAD'], submoduleDir),
    dirty: readCommand('git', ['status', '--short', '--untracked-files=no'], submoduleDir),
  };
}
// Decide whether the extracted build may be reused. Never when forced; only
// when a manifest.json exists in the output and the recorded stamp matches the
// submodule's current git revision and dirty state.
function isBuildCurrent(force) {
  const manifestPath = path.join(buildOutputDir, 'manifest.json');
  if (force || !fs.existsSync(manifestPath)) {
    return false;
  }
  const stamp = readStamp();
  if (stamp === null) {
    return false;
  }
  const current = getSourceState();
  return stamp.revision === current.revision && stamp.dirty === current.dirty;
}
// Install submodule npm dependencies only when node_modules is missing or the
// package-lock hash recorded by the previous install no longer matches.
function ensureDependenciesInstalled() {
  const nodeModulesDir = path.join(submoduleDir, 'node_modules');
  const currentLockHash = hashFile(submodulePackageLockPath);
  let installedLockHash = '';
  try {
    installedLockHash = fs.readFileSync(dependencyStampPath, 'utf8').trim();
  } catch {}
  if (!fs.existsSync(nodeModulesDir) || installedLockHash !== currentLockHash) {
    run(npmCommand, ['ci'], submoduleDir);
    // `npm ci` recreates node_modules; mkdir is defensive before writing the stamp inside it.
    fs.mkdirSync(nodeModulesDir, { recursive: true });
    fs.writeFileSync(dependencyStampPath, `${currentLockHash}\n`, 'utf8');
  }
}
// Ensure dependencies are installed, then run the submodule's Chrome-targeted build.
function installAndBuild() {
  ensureDependenciesInstalled();
  run(npmCommand, ['run', 'build', '--', '--target', 'chrome'], submoduleDir);
}
// Unzip the built Chrome artifact into build/yomitan, replacing any previous
// extraction. Extracts into a temp dir first so a failed unzip cannot leave a
// half-written output directory behind.
function extractBuild() {
  if (!fs.existsSync(zipPath)) {
    throw new Error(`Expected Yomitan build artifact at ${zipPath}`);
  }
  const tempDir = fs.mkdtempSync(path.join(os.tmpdir(), 'subminer-yomitan-'));
  try {
    // Relies on a system `unzip` binary being available on PATH.
    run('unzip', ['-qo', zipPath, '-d', tempDir], repoRoot);
    fs.rmSync(buildOutputDir, { recursive: true, force: true });
    fs.mkdirSync(path.dirname(buildOutputDir), { recursive: true });
    fs.cpSync(tempDir, buildOutputDir, { recursive: true });
    // Sanity check: a valid extension build must contain manifest.json.
    if (!fs.existsSync(path.join(buildOutputDir, 'manifest.json'))) {
      throw new Error(`Extracted Yomitan build missing manifest.json in ${buildOutputDir}`);
    }
  } finally {
    fs.rmSync(tempDir, { recursive: true, force: true });
  }
}
// Record the submodule git state plus a build timestamp so later runs can
// detect that the extracted output is still current.
function writeStamp() {
  const { revision, dirty } = getSourceState();
  const stamp = {
    revision,
    dirty,
    builtAt: new Date().toISOString(),
  };
  const serialized = JSON.stringify(stamp, null, 2);
  fs.writeFileSync(stampPath, `${serialized}\n`, 'utf8');
}
// Entry point: rebuild and re-extract Yomitan unless the existing output is
// current for the submodule's git state (pass --force to always rebuild).
function main() {
  const force = process.argv.includes('--force');
  ensureSubmodulePresent();
  if (isBuildCurrent(force)) {
    process.stdout.write(`Yomitan build current: ${buildOutputDir}\n`);
    return;
  }
  process.stdout.write('Building Yomitan Chrome artifact...\n');
  installAndBuild();
  extractBuild();
  writeStamp();
  process.stdout.write(`Yomitan extracted to ${buildOutputDir}\n`);
}
main();

View File

@@ -1,4 +1,4 @@
import { ipcMain } from 'electron';
import electron from 'electron';
import * as fs from 'fs';
import * as path from 'path';
import * as os from 'os';
@@ -25,6 +25,8 @@ import {
} from '../../shared/ipc/validators';
import { buildJimakuSubtitleFilenameFromMediaPath } from './jimaku-download-path';
const { ipcMain } = electron;
const logger = createLogger('main:anki-jimaku-ipc');
export interface AnkiJimakuIpcDeps {

View File

@@ -3,9 +3,9 @@ import assert from 'node:assert/strict';
import fs from 'node:fs';
import os from 'node:os';
import path from 'node:path';
import type { DatabaseSync as NodeDatabaseSync } from 'node:sqlite';
import { toMonthKey } from './immersion-tracker/maintenance';
import { enqueueWrite } from './immersion-tracker/queue';
import { Database, type DatabaseSync } from './immersion-tracker/sqlite';
import {
deriveCanonicalTitle,
normalizeText,
@@ -17,22 +17,6 @@ type ImmersionTrackerService = import('./immersion-tracker-service').ImmersionTr
type ImmersionTrackerServiceCtor =
typeof import('./immersion-tracker-service').ImmersionTrackerService;
type DatabaseSyncCtor = typeof NodeDatabaseSync;
const DatabaseSync: DatabaseSyncCtor | null = (() => {
try {
return (require('node:sqlite') as { DatabaseSync?: DatabaseSyncCtor }).DatabaseSync ?? null;
} catch {
return null;
}
})();
const testIfSqlite = DatabaseSync ? test : test.skip;
if (!DatabaseSync) {
console.warn(
'Skipping SQLite-backed immersion tracker persistence tests in this runtime; run `bun run test:immersion:sqlite` for real DB coverage.',
);
}
let trackerCtor: ImmersionTrackerServiceCtor | null = null;
async function loadTrackerCtor(): Promise<ImmersionTrackerServiceCtor> {
@@ -89,7 +73,7 @@ test('seam: toMonthKey uses UTC calendar month', () => {
assert.equal(toMonthKey(Date.UTC(2026, 1, 1, 0, 0, 0, 0)), 202602);
});
testIfSqlite('startSession generates UUID-like session identifiers', async () => {
test('startSession generates UUID-like session identifiers', async () => {
const dbPath = makeDbPath();
let tracker: ImmersionTrackerService | null = null;
@@ -105,7 +89,7 @@ testIfSqlite('startSession generates UUID-like session identifiers', async () =>
privateApi.flushTelemetry(true);
privateApi.flushNow();
const db = new DatabaseSync!(dbPath);
const db = new Database(dbPath);
const row = db.prepare('SELECT session_uuid FROM imm_sessions LIMIT 1').get() as {
session_uuid: string;
} | null;
@@ -120,7 +104,7 @@ testIfSqlite('startSession generates UUID-like session identifiers', async () =>
}
});
testIfSqlite('destroy finalizes active session and persists final telemetry', async () => {
test('destroy finalizes active session and persists final telemetry', async () => {
const dbPath = makeDbPath();
let tracker: ImmersionTrackerService | null = null;
@@ -132,7 +116,7 @@ testIfSqlite('destroy finalizes active session and persists final telemetry', as
tracker.recordSubtitleLine('Hello immersion', 0, 1);
tracker.destroy();
const db = new DatabaseSync!(dbPath);
const db = new Database(dbPath);
const sessionRow = db.prepare('SELECT ended_at_ms FROM imm_sessions LIMIT 1').get() as {
ended_at_ms: number | null;
} | null;
@@ -150,7 +134,7 @@ testIfSqlite('destroy finalizes active session and persists final telemetry', as
}
});
testIfSqlite('persists and retrieves minimum immersion tracking fields', async () => {
test('persists and retrieves minimum immersion tracking fields', async () => {
const dbPath = makeDbPath();
let tracker: ImmersionTrackerService | null = null;
@@ -178,7 +162,7 @@ testIfSqlite('persists and retrieves minimum immersion tracking fields', async (
tracker.destroy();
const db = new DatabaseSync!(dbPath);
const db = new Database(dbPath);
const videoRow = db
.prepare('SELECT canonical_title, source_path, duration_ms FROM imm_videos LIMIT 1')
.get() as {
@@ -190,7 +174,7 @@ testIfSqlite('persists and retrieves minimum immersion tracking fields', async (
.prepare(
`SELECT lines_seen, words_seen, tokens_seen, cards_mined
FROM imm_session_telemetry
ORDER BY sample_ms DESC
ORDER BY sample_ms DESC, telemetry_id DESC
LIMIT 1`,
)
.get() as {
@@ -217,7 +201,7 @@ testIfSqlite('persists and retrieves minimum immersion tracking fields', async (
}
});
testIfSqlite('applies configurable queue, flush, and retention policy', async () => {
test('applies configurable queue, flush, and retention policy', async () => {
const dbPath = makeDbPath();
let tracker: ImmersionTrackerService | null = null;
@@ -270,7 +254,7 @@ testIfSqlite('applies configurable queue, flush, and retention policy', async ()
}
});
testIfSqlite('monthly rollups are grouped by calendar month', async () => {
test('monthly rollups are grouped by calendar month', async () => {
const dbPath = makeDbPath();
let tracker: ImmersionTrackerService | null = null;
@@ -278,7 +262,7 @@ testIfSqlite('monthly rollups are grouped by calendar month', async () => {
const Ctor = await loadTrackerCtor();
tracker = new Ctor({ dbPath });
const privateApi = tracker as unknown as {
db: NodeDatabaseSync;
db: DatabaseSync;
runRollupMaintenance: () => void;
};
@@ -433,16 +417,16 @@ testIfSqlite('monthly rollups are grouped by calendar month', async () => {
}
});
testIfSqlite('flushSingle reuses cached prepared statements', async () => {
test('flushSingle reuses cached prepared statements', async () => {
const dbPath = makeDbPath();
let tracker: ImmersionTrackerService | null = null;
let originalPrepare: NodeDatabaseSync['prepare'] | null = null;
let originalPrepare: DatabaseSync['prepare'] | null = null;
try {
const Ctor = await loadTrackerCtor();
tracker = new Ctor({ dbPath });
const privateApi = tracker as unknown as {
db: NodeDatabaseSync;
db: DatabaseSync;
flushSingle: (write: {
kind: 'telemetry' | 'event';
sessionId: number;
@@ -472,7 +456,7 @@ testIfSqlite('flushSingle reuses cached prepared statements', async () => {
originalPrepare = privateApi.db.prepare;
let prepareCalls = 0;
privateApi.db.prepare = (...args: Parameters<NodeDatabaseSync['prepare']>) => {
privateApi.db.prepare = (...args: Parameters<DatabaseSync['prepare']>) => {
prepareCalls += 1;
return originalPrepare!.apply(privateApi.db, args);
};
@@ -557,7 +541,7 @@ testIfSqlite('flushSingle reuses cached prepared statements', async () => {
assert.equal(prepareCalls, 0);
} finally {
if (tracker && originalPrepare) {
const privateApi = tracker as unknown as { db: NodeDatabaseSync };
const privateApi = tracker as unknown as { db: DatabaseSync };
privateApi.db.prepare = originalPrepare;
}
tracker?.destroy();

View File

@@ -1,9 +1,9 @@
import path from 'node:path';
import { DatabaseSync } from 'node:sqlite';
import * as fs from 'node:fs';
import { createLogger } from '../../logger';
import { getLocalVideoMetadata } from './immersion-tracker/metadata';
import { pruneRetention, runRollupMaintenance } from './immersion-tracker/maintenance';
import { Database, type DatabaseSync } from './immersion-tracker/sqlite';
import { finalizeSessionRecord, startSessionRecord } from './immersion-tracker/session';
import {
applyPragmas,
@@ -164,7 +164,7 @@ export class ImmersionTrackerService {
1,
3650,
) * 86_400_000;
this.db = new DatabaseSync(this.dbPath);
this.db = new Database(this.dbPath);
applyPragmas(this.db);
ensureSchema(this.db);
this.preparedStatements = createTrackerPreparedStatements(this.db);

View File

@@ -1,4 +1,4 @@
import type { DatabaseSync } from 'node:sqlite';
import type { DatabaseSync } from './sqlite';
const ROLLUP_STATE_KEY = 'last_rollup_sample_ms';
const DAILY_MS = 86_400_000;

View File

@@ -1,4 +1,4 @@
import type { DatabaseSync } from 'node:sqlite';
import type { DatabaseSync } from './sqlite';
import type {
ImmersionSessionRollupRow,
SessionSummaryQueryRow,
@@ -44,7 +44,7 @@ export function getSessionTimeline(
cards_mined AS cardsMined
FROM imm_session_telemetry
WHERE session_id = ?
ORDER BY sample_ms DESC
ORDER BY sample_ms DESC, telemetry_id DESC
LIMIT ?
`);
return prepared.all(sessionId, limit) as unknown as SessionTimelineRow[];
@@ -56,8 +56,8 @@ export function getQueryHints(db: DatabaseSync): {
} {
const sessions = db.prepare('SELECT COUNT(*) AS total FROM imm_sessions');
const active = db.prepare('SELECT COUNT(*) AS total FROM imm_sessions WHERE ended_at_ms IS NULL');
const totalSessions = Number(sessions.get()?.total ?? 0);
const activeSessions = Number(active.get()?.total ?? 0);
const totalSessions = Number((sessions.get() as { total?: number } | null)?.total ?? 0);
const activeSessions = Number((active.get() as { total?: number } | null)?.total ?? 0);
return { totalSessions, activeSessions };
}

View File

@@ -1,5 +1,5 @@
import crypto from 'node:crypto';
import type { DatabaseSync } from 'node:sqlite';
import type { DatabaseSync } from './sqlite';
import { createInitialSessionState } from './reducer';
import { SESSION_STATUS_ACTIVE, SESSION_STATUS_ENDED } from './types';
import type { SessionState } from './types';

View File

@@ -0,0 +1,20 @@
// Thin local typing layer over the bundled `libsql` client so tracker code
// depends on a narrow, locally-declared SQLite surface instead of node:sqlite.
import Database = require('libsql');
export { Database };
/**
 * Result of a mutating statement run. Presumably mirrors libsql's
 * better-sqlite3-compatible run result — confirm against libsql typings.
 */
export interface DatabaseRunResult {
  changes: number;
  lastInsertRowid: number | bigint;
}
/** Prepared statement: `run` for writes, `get` for one row, `all` for every row. */
export interface DatabaseStatement {
  run(...params: unknown[]): DatabaseRunResult;
  get(...params: unknown[]): unknown;
  all(...params: unknown[]): unknown[];
}
/**
 * Minimal database handle used by the immersion tracker. Kept under the name
 * DatabaseSync so call sites stay source-compatible with the previous
 * `node:sqlite` import they replaced.
 */
export interface DatabaseSync {
  prepare(source: string): DatabaseStatement;
  exec(source: string): DatabaseSync;
  close(): DatabaseSync;
}

View File

@@ -3,7 +3,7 @@ import fs from 'node:fs';
import os from 'node:os';
import path from 'node:path';
import test from 'node:test';
import type { DatabaseSync as NodeDatabaseSync } from 'node:sqlite';
import { Database } from './sqlite';
import { finalizeSessionRecord, startSessionRecord } from './session';
import {
createTrackerPreparedStatements,
@@ -13,22 +13,6 @@ import {
} from './storage';
import { EVENT_SUBTITLE_LINE, SESSION_STATUS_ENDED, SOURCE_TYPE_LOCAL } from './types';
type DatabaseSyncCtor = typeof NodeDatabaseSync;
const DatabaseSync: DatabaseSyncCtor | null = (() => {
try {
return (require('node:sqlite') as { DatabaseSync?: DatabaseSyncCtor }).DatabaseSync ?? null;
} catch {
return null;
}
})();
const testIfSqlite = DatabaseSync ? test : test.skip;
if (!DatabaseSync) {
console.warn(
'Skipping SQLite-backed immersion tracker storage/session tests in this runtime; run `bun run test:immersion:sqlite` for real DB coverage.',
);
}
function makeDbPath(): string {
const dir = fs.mkdtempSync(path.join(os.tmpdir(), 'subminer-imm-storage-session-'));
return path.join(dir, 'immersion.sqlite');
@@ -41,9 +25,9 @@ function cleanupDbPath(dbPath: string): void {
}
}
testIfSqlite('ensureSchema creates immersion core tables', () => {
test('ensureSchema creates immersion core tables', () => {
const dbPath = makeDbPath();
const db = new DatabaseSync!(dbPath);
const db = new Database(dbPath);
try {
ensureSchema(db);
@@ -77,9 +61,9 @@ testIfSqlite('ensureSchema creates immersion core tables', () => {
}
});
testIfSqlite('start/finalize session updates ended_at and status', () => {
test('start/finalize session updates ended_at and status', () => {
const dbPath = makeDbPath();
const db = new DatabaseSync!(dbPath);
const db = new Database(dbPath);
try {
ensureSchema(db);
@@ -111,9 +95,9 @@ testIfSqlite('start/finalize session updates ended_at and status', () => {
}
});
testIfSqlite('executeQueuedWrite inserts event and telemetry rows', () => {
test('executeQueuedWrite inserts event and telemetry rows', () => {
const dbPath = makeDbPath();
const db = new DatabaseSync!(dbPath);
const db = new Database(dbPath);
try {
ensureSchema(db);
@@ -178,9 +162,9 @@ testIfSqlite('executeQueuedWrite inserts event and telemetry rows', () => {
}
});
testIfSqlite('executeQueuedWrite inserts and upserts word and kanji rows', () => {
test('executeQueuedWrite inserts and upserts word and kanji rows', () => {
const dbPath = makeDbPath();
const db = new DatabaseSync!(dbPath);
const db = new Database(dbPath);
try {
ensureSchema(db);

View File

@@ -1,4 +1,4 @@
import type { DatabaseSync } from 'node:sqlite';
import type { DatabaseSync } from './sqlite';
import { SCHEMA_VERSION } from './types';
import type { QueuedWrite, VideoMetadata } from './types';
@@ -13,7 +13,7 @@ function hasColumn(db: DatabaseSync, tableName: string, columnName: string): boo
return db
.prepare(`PRAGMA table_info(${tableName})`)
.all()
.some((row) => (row as { name: string }).name === columnName);
.some((row: unknown) => (row as { name: string }).name === columnName);
}
function addColumnIfMissing(db: DatabaseSync, tableName: string, columnName: string): void {

View File

@@ -1,4 +1,5 @@
import { BrowserWindow, ipcMain, IpcMainEvent } from 'electron';
import electron from 'electron';
import type { IpcMainEvent } from 'electron';
import type {
RuntimeOptionId,
RuntimeOptionValue,
@@ -18,6 +19,8 @@ import {
parseSubsyncManualRunRequest,
} from '../../shared/ipc/validators';
const { BrowserWindow, ipcMain } = electron;
export interface IpcServiceDeps {
onOverlayModalClosed: (modal: OverlayHostedModal) => void;
onOverlayModalOpened?: (modal: OverlayHostedModal) => void;

View File

@@ -1,6 +1,8 @@
import * as fs from 'fs';
import * as path from 'path';
import { safeStorage } from 'electron';
import electron from 'electron';
const { safeStorage } = electron;
interface PersistedSessionPayload {
encryptedSession?: string;

View File

@@ -1,6 +1,6 @@
import { KikuFieldGroupingChoice, KikuFieldGroupingRequestData } from '../../types';
import { createFieldGroupingCallback } from './field-grouping';
import { BrowserWindow } from 'electron';
import type { BrowserWindow } from 'electron';
export function sendToVisibleOverlayRuntime<T extends string>(options: {
mainWindow: BrowserWindow | null;

View File

@@ -1,6 +1,7 @@
import { BrowserWindow } from 'electron';
import type { BrowserWindow } from 'electron';
import { BaseWindowTracker, createWindowTracker } from '../../window-trackers';
import {
AiConfig,
AnkiConnectConfig,
KikuFieldGroupingChoice,
KikuFieldGroupingRequestData,
@@ -13,6 +14,7 @@ type AnkiIntegrationLike = {
type CreateAnkiIntegrationArgs = {
config: AnkiConnectConfig;
aiConfig: AiConfig;
subtitleTimingTracker: unknown;
mpvClient: { send?: (payload: { command: string[] }) => void };
showDesktopNotification: (title: string, options: { body?: string; icon?: string }) => void;
@@ -39,6 +41,7 @@ function createDefaultAnkiIntegration(args: CreateAnkiIntegrationArgs): AnkiInte
args.showDesktopNotification,
args.createFieldGroupingCallback(),
args.knownWordCacheStatePath,
args.aiConfig,
);
}
@@ -57,7 +60,7 @@ export function initializeOverlayRuntime(options: {
targetMpvSocketPath?: string | null,
) => BaseWindowTracker | null;
getMpvSocketPath: () => string;
getResolvedConfig: () => { ankiConnect?: AnkiConnectConfig };
getResolvedConfig: () => { ankiConnect?: AnkiConnectConfig; ai?: AiConfig };
getSubtitleTimingTracker: () => unknown | null;
getMpvClient: () => {
send?: (payload: { command: string[] }) => void;
@@ -118,6 +121,7 @@ export function initializeOverlayRuntime(options: {
const createAnkiIntegration = options.createAnkiIntegration ?? createDefaultAnkiIntegration;
const integration = createAnkiIntegration({
config: effectiveAnkiConfig,
aiConfig: config.ai ?? {},
subtitleTimingTracker,
mpvClient,
showDesktopNotification: options.showDesktopNotification,

View File

@@ -1,8 +1,9 @@
import { globalShortcut } from 'electron';
import electron from 'electron';
import { ConfiguredShortcuts } from '../utils/shortcut-config';
import { isGlobalShortcutRegisteredSafe } from './shortcut-fallback';
import { createLogger } from '../../logger';
const { globalShortcut } = electron;
const logger = createLogger('main:overlay-shortcut-service');
export interface OverlayShortcutHandlers {

View File

@@ -1,9 +1,11 @@
import { BrowserWindow } from 'electron';
import electron from 'electron';
import type { BrowserWindow } from 'electron';
import * as path from 'path';
import { WindowGeometry } from '../../types';
import { createLogger } from '../../logger';
import { IPC_CHANNELS } from '../../shared/ipc/contracts';
const { BrowserWindow: ElectronBrowserWindow } = electron;
const logger = createLogger('main:overlay-window');
const overlayWindowLayerByInstance = new WeakMap<BrowserWindow, OverlayWindowKind>();
@@ -18,7 +20,7 @@ function loadOverlayWindowLayer(window: BrowserWindow, layer: OverlayWindowKind)
.loadFile(htmlPath, {
query: { layer },
})
.catch((err) => {
.catch((err: unknown) => {
logger.error('Failed to load HTML file:', err);
});
}
@@ -90,7 +92,7 @@ export function createOverlayWindow(
onWindowClosed: (kind: OverlayWindowKind) => void;
},
): BrowserWindow {
const window = new BrowserWindow({
const window = new ElectronBrowserWindow({
show: false,
width: 800,
height: 600,

View File

@@ -1,4 +1,6 @@
import { globalShortcut } from 'electron';
import electron from 'electron';
const { globalShortcut } = electron;
export function isGlobalShortcutRegisteredSafe(accelerator: string): boolean {
try {

View File

@@ -1,6 +1,8 @@
import { BrowserWindow, globalShortcut } from 'electron';
import electron from 'electron';
import type { BrowserWindow } from 'electron';
import { createLogger } from '../../logger';
const { globalShortcut } = electron;
const logger = createLogger('main:shortcut');
export interface GlobalShortcutConfig {

View File

@@ -1,4 +1,5 @@
import { BrowserWindow, Extension, session } from 'electron';
import electron from 'electron';
import type { BrowserWindow, Extension } from 'electron';
import * as fs from 'fs';
import { createLogger } from '../../logger';
import { ensureExtensionCopy } from './yomitan-extension-copy';
@@ -7,6 +8,7 @@ import {
resolveExistingYomitanExtensionPath,
} from './yomitan-extension-paths';
const { session } = electron;
const logger = createLogger('main:yomitan-extension-loader');
export interface YomitanExtensionLoaderDeps {

View File

@@ -1,6 +1,8 @@
import { BrowserWindow, Extension, session } from 'electron';
import electron from 'electron';
import type { BrowserWindow, Extension } from 'electron';
import { createLogger } from '../../logger';
const { BrowserWindow: ElectronBrowserWindow, session } = electron;
const logger = createLogger('main:yomitan-settings');
export interface OpenYomitanSettingsWindowOptions {
@@ -28,7 +30,7 @@ export function openYomitanSettingsWindow(options: OpenYomitanSettingsWindowOpti
logger.info('Creating new settings window for extension:', options.yomitanExt.id);
const settingsWindow = new BrowserWindow({
const settingsWindow = new ElectronBrowserWindow({
width: 1200,
height: 800,
show: false,

View File

@@ -1,7 +1,8 @@
import { Notification, nativeImage } from 'electron';
import electron from 'electron';
import * as fs from 'fs';
import { createLogger } from '../../logger';
const { Notification, nativeImage } = electron;
const logger = createLogger('core:notification');
export function showDesktopNotification(

View File

@@ -2264,6 +2264,9 @@ const { appReadyRuntimeRunner } = composeAppReadyRuntime({
failHandlers: {
logError: (details) => logger.error(details),
showErrorBox: (title, details) => dialog.showErrorBox(title, details),
setExitCode: (code) => {
process.exitCode = code;
},
quit: () => app.quit(),
},
},
@@ -2272,6 +2275,9 @@ const { appReadyRuntimeRunner } = composeAppReadyRuntime({
failHandlers: {
logError: (message) => logger.error(message),
showErrorBox: (title, message) => dialog.showErrorBox(title, message),
setExitCode: (code) => {
process.exitCode = code;
},
quit: () => app.quit(),
},
},

View File

@@ -32,10 +32,10 @@ test('buildConfigWarningSummary includes warnings with formatted values', () =>
test('buildConfigWarningNotificationBody includes concise warning details', () => {
const body = buildConfigWarningNotificationBody('/tmp/config.jsonc', [
{
path: 'ankiConnect.openRouter',
message: 'Deprecated key; use ankiConnect.ai instead.',
path: 'ankiConnect.ai',
message: 'Expected boolean.',
value: { enabled: true },
fallback: {},
fallback: false,
},
{
path: 'ankiConnect.isLapis.sentenceCardSentenceField',
@@ -47,7 +47,7 @@ test('buildConfigWarningNotificationBody includes concise warning details', () =
assert.match(body, /2 config validation issue\(s\) detected\./);
assert.match(body, /File: \/tmp\/config\.jsonc/);
assert.match(body, /1\. ankiConnect\.openRouter: Deprecated key; use ankiConnect\.ai instead\./);
assert.match(body, /1\. ankiConnect\.ai: Expected boolean\./);
assert.match(
body,
/2\. ankiConnect\.isLapis\.sentenceCardSentenceField: Deprecated key; sentence-card sentence field is fixed to Sentence\./,
@@ -81,8 +81,7 @@ test('buildConfigParseErrorDetails includes path error and restart guidance', ()
test('failStartupFromConfig invokes handlers and throws', () => {
const calls: string[] = [];
const previousExitCode = process.exitCode;
process.exitCode = 0;
const exitCodes: number[] = [];
assert.throws(
() =>
@@ -93,6 +92,9 @@ test('failStartupFromConfig invokes handlers and throws', () => {
showErrorBox: (title, details) => {
calls.push(`dialog:${title}:${details}`);
},
setExitCode: (code) => {
exitCodes.push(code);
},
quit: () => {
calls.push('quit');
},
@@ -100,8 +102,6 @@ test('failStartupFromConfig invokes handlers and throws', () => {
/bad value/,
);
assert.equal(process.exitCode, 1);
assert.deepEqual(exitCodes, [1]);
assert.deepEqual(calls, ['log:bad value', 'dialog:Config Error:bad value', 'quit']);
process.exitCode = previousExitCode;
});

View File

@@ -3,6 +3,7 @@ import type { ConfigValidationWarning } from '../types';
export type StartupFailureHandlers = {
logError: (details: string) => void;
showErrorBox: (title: string, details: string) => void;
setExitCode?: (code: number) => void;
quit: () => void;
};
@@ -98,7 +99,10 @@ export function failStartupFromConfig(
): never {
handlers.logError(details);
handlers.showErrorBox(title, details);
process.exitCode = 1;
handlers.setExitCode?.(1);
if (!handlers.setExitCode) {
process.exitCode = 1;
}
handlers.quit();
throw new Error(details);
}

View File

@@ -1,22 +1,12 @@
import test from 'node:test';
import assert from 'node:assert/strict';
async function loadRegistryOrSkip(t: test.TestContext) {
try {
return await import('./registry');
} catch (error) {
const message = error instanceof Error ? error.message : String(error);
if (message.includes('node:sqlite')) {
t.skip('registry import requires node:sqlite support in this runtime');
return null;
}
throw error;
}
// Loads the runtime registry module via dynamic import so each test awaits
// module resolution itself (replaces the earlier node:sqlite skip guard).
async function loadRegistry() {
  return import('./registry');
}
test('createMainRuntimeRegistry exposes expected runtime domains', async (t) => {
const loaded = await loadRegistryOrSkip(t);
if (!loaded) return;
test('createMainRuntimeRegistry exposes expected runtime domains', async () => {
const loaded = await loadRegistry();
const { createMainRuntimeRegistry } = loaded;
const registry = createMainRuntimeRegistry();
@@ -30,9 +20,8 @@ test('createMainRuntimeRegistry exposes expected runtime domains', async (t) =>
assert.ok(registry.mining);
});
test('registry domains expose representative factories', async (t) => {
const loaded = await loadRegistryOrSkip(t);
if (!loaded) return;
test('registry domains expose representative factories', async () => {
const loaded = await loadRegistry();
const { createMainRuntimeRegistry } = loaded;
const registry = createMainRuntimeRegistry();

View File

@@ -17,6 +17,9 @@ export function createBuildReloadConfigMainDepsHandler(deps: ReloadConfigMainDep
logError: (details: string) => deps.failHandlers.logError(details),
showErrorBox: (title: string, details: string) =>
deps.failHandlers.showErrorBox(title, details),
setExitCode: deps.failHandlers.setExitCode
? (code: number) => deps.failHandlers.setExitCode?.(code)
: undefined,
quit: () => deps.failHandlers.quit(),
},
});
@@ -29,6 +32,9 @@ export function createBuildCriticalConfigErrorMainDepsHandler(deps: CriticalConf
logError: (details: string) => deps.failHandlers.logError(details),
showErrorBox: (title: string, details: string) =>
deps.failHandlers.showErrorBox(title, details),
setExitCode: deps.failHandlers.setExitCode
? (code: number) => deps.failHandlers.setExitCode?.(code)
: undefined,
quit: () => deps.failHandlers.quit(),
},
});

View File

@@ -55,8 +55,7 @@ test('createReloadConfigHandler runs success flow with warnings', async () => {
test('createReloadConfigHandler fails startup for parse errors', () => {
const calls: string[] = [];
const previousExitCode = process.exitCode;
process.exitCode = 0;
const exitCodes: number[] = [];
const reloadConfig = createReloadConfigHandler({
reloadConfigStrict: () => ({
@@ -74,12 +73,13 @@ test('createReloadConfigHandler fails startup for parse errors', () => {
failHandlers: {
logError: (details) => calls.push(`error:${details}`),
showErrorBox: (title, details) => calls.push(`dialog:${title}:${details}`),
setExitCode: (code) => exitCodes.push(code),
quit: () => calls.push('quit'),
},
});
assert.throws(() => reloadConfig(), /Failed to parse config file at:/);
assert.equal(process.exitCode, 1);
assert.deepEqual(exitCodes, [1]);
assert.ok(calls.some((entry) => entry.startsWith('error:Failed to parse config file at:')));
assert.ok(calls.some((entry) => entry.includes('/tmp/config.jsonc')));
assert.ok(calls.some((entry) => entry.includes('Error: unexpected token')));
@@ -91,20 +91,18 @@ test('createReloadConfigHandler fails startup for parse errors', () => {
);
assert.ok(calls.includes('quit'));
assert.equal(calls.includes('hotReload:start'), false);
process.exitCode = previousExitCode;
});
test('createCriticalConfigErrorHandler formats and fails', () => {
const calls: string[] = [];
const previousExitCode = process.exitCode;
process.exitCode = 0;
const exitCodes: number[] = [];
const handleCriticalErrors = createCriticalConfigErrorHandler({
getConfigPath: () => '/tmp/config.jsonc',
failHandlers: {
logError: (details) => calls.push(`error:${details}`),
showErrorBox: (title, details) => calls.push(`dialog:${title}:${details}`),
setExitCode: (code) => exitCodes.push(code),
quit: () => calls.push('quit'),
},
});
@@ -114,11 +112,9 @@ test('createCriticalConfigErrorHandler formats and fails', () => {
/Critical config validation failed/,
);
assert.equal(process.exitCode, 1);
assert.deepEqual(exitCodes, [1]);
assert.ok(calls.some((entry) => entry.includes('/tmp/config.jsonc')));
assert.ok(calls.some((entry) => entry.includes('1. foo invalid')));
assert.ok(calls.some((entry) => entry.includes('2. bar invalid')));
assert.ok(calls.includes('quit'));
process.exitCode = previousExitCode;
});

View File

@@ -31,6 +31,7 @@ export type ReloadConfigRuntimeDeps = {
failHandlers: {
logError: (details: string) => void;
showErrorBox: (title: string, details: string) => void;
setExitCode?: (code: number) => void;
quit: () => void;
};
};
@@ -40,6 +41,7 @@ export type CriticalConfigErrorRuntimeDeps = {
failHandlers: {
logError: (details: string) => void;
showErrorBox: (title: string, details: string) => void;
setExitCode?: (code: number) => void;
quit: () => void;
};
};