test: stabilize bun coverage reporting

This commit is contained in:
2026-03-27 23:35:59 -07:00
parent 23b2360ac4
commit 9caf25bedb
26 changed files with 732 additions and 99 deletions

View File

@@ -61,6 +61,16 @@ jobs:
- name: Test suite (source) - name: Test suite (source)
run: bun run test:fast run: bun run test:fast
- name: Coverage suite (maintained source lane)
run: bun run test:coverage:src
- name: Upload coverage artifact
uses: actions/upload-artifact@v4
with:
name: coverage-test-src
path: coverage/test-src/lcov.info
if-no-files-found: error
- name: Launcher smoke suite (source) - name: Launcher smoke suite (source)
run: bun run test:launcher:smoke:src run: bun run test:launcher:smoke:src

View File

@@ -49,6 +49,16 @@ jobs:
- name: Test suite (source) - name: Test suite (source)
run: bun run test:fast run: bun run test:fast
- name: Coverage suite (maintained source lane)
run: bun run test:coverage:src
- name: Upload coverage artifact
uses: actions/upload-artifact@v4
with:
name: coverage-test-src
path: coverage/test-src/lcov.info
if-no-files-found: error
- name: Launcher smoke suite (source) - name: Launcher smoke suite (source)
run: bun run test:launcher:smoke:src run: bun run test:launcher:smoke:src

View File

@@ -34,6 +34,8 @@ None.
| SM-008 | P3 | todo | subtitles | Add core subtitle-position persistence/path tests | | SM-008 | P3 | todo | subtitles | Add core subtitle-position persistence/path tests |
| SM-009 | P3 | todo | tokenizer | Add tests for JLPT token filter | | SM-009 | P3 | todo | tokenizer | Add tests for JLPT token filter |
| SM-010 | P1 | todo | immersion-tracker | Refactor storage + immersion-tracker service into focused modules | | SM-010 | P1 | todo | immersion-tracker | Refactor storage + immersion-tracker service into focused modules |
| SM-011 | P1 | done | tests | Add coverage reporting for maintained test lanes |
| SM-012 | P2 | done | config/runtime | Replace JSON serialize-clone helpers with structured cloning |
## Icebox ## Icebox
@@ -45,7 +47,7 @@ None.
Title: Add tests for CLI parser and args normalizer Title: Add tests for CLI parser and args normalizer
Priority: P1 Priority: P1
Status: todo Status: done
Scope: Scope:
- `launcher/config/cli-parser-builder.ts` - `launcher/config/cli-parser-builder.ts`
@@ -192,3 +194,43 @@ Acceptance:
- YouTube code split into pure utilities, a stateful manager (`YouTubeManager`), and a dedicated write queue (`WriteQueue`) - YouTube code split into pure utilities, a stateful manager (`YouTubeManager`), and a dedicated write queue (`WriteQueue`)
- removed `storage.ts` is replaced with focused modules and updated imports - removed `storage.ts` is replaced with focused modules and updated imports
- no API or migration regressions; existing tests for trackers/storage coverage remain green or receive focused updates - no API or migration regressions; existing tests for trackers/storage coverage remain green or receive focused updates
### SM-011
Title: Add coverage reporting for maintained test lanes
Priority: P1
Status: done
Scope:
- `package.json`
- CI workflow files under `.github/`
- `docs/workflow/verification.md`
Acceptance:
- at least one maintained test lane emits machine-readable coverage output
- CI surfaces coverage as an artifact, summary, or check output
- local contributor path for coverage is documented
- chosen coverage path works with Bun/TypeScript lanes already maintained by the repo
Implementation note:
- Added `bun run test:coverage:src` for the maintained source lane via a sharded coverage runner, with merged LCOV output at `coverage/test-src/lcov.info` and CI/release artifact upload as `coverage-test-src`.
### SM-012
Title: Replace JSON serialize-clone helpers with structured cloning
Priority: P2
Status: done
Scope:
- `src/runtime-options.ts`
- `src/config/definitions.ts`
- `src/config/service.ts`
- `src/main/controller-config-update.ts`
Acceptance:
- runtime/config clone helpers stop using `JSON.parse(JSON.stringify(...))`
- replacement preserves current behavior for plain config/runtime objects
- focused tests cover clone/merge behavior that could regress during the swap
- no new clone helper is introduced in these paths without a documented reason
Done:
- replaced JSON serialize-clone call sites in runtime/config/controller update paths with `structuredClone`
- updated focused tests and fixtures to cover detached clone behavior and guard against regressions

View File

@@ -0,0 +1,5 @@
type: internal
area: release
- Added a maintained source coverage lane that shards Bun coverage one test file at a time and merges LCOV output into `coverage/test-src/lcov.info`.
- CI and release quality-gate now upload the merged source-lane LCOV artifact for inspection.

View File

@@ -21,6 +21,7 @@ Read when: you need internal architecture, workflow, verification, or release gu
- New feature or refactor: [Workflow](./workflow/README.md), then [Architecture](./architecture/README.md) - New feature or refactor: [Workflow](./workflow/README.md), then [Architecture](./architecture/README.md)
- Test/build/release work: [Verification](./workflow/verification.md), then [Release Guide](./RELEASING.md) - Test/build/release work: [Verification](./workflow/verification.md), then [Release Guide](./RELEASING.md)
- Coverage lane selection or LCOV artifact path: [Verification](./workflow/verification.md)
- “What owns this behavior?”: [Domains](./architecture/domains.md) - “What owns this behavior?”: [Domains](./architecture/domains.md)
- “Can these modules depend on each other?”: [Layering](./architecture/layering.md) - “Can these modules depend on each other?”: [Layering](./architecture/layering.md)
- “What doc should exist for this?”: [Catalog](./knowledge-base/catalog.md) - “What doc should exist for this?”: [Catalog](./knowledge-base/catalog.md)

View File

@@ -31,8 +31,15 @@ bun run docs:build
- Config/schema/defaults: `bun run test:config`, then `bun run generate:config-example` if template/defaults changed - Config/schema/defaults: `bun run test:config`, then `bun run generate:config-example` if template/defaults changed
- Launcher/plugin: `bun run test:launcher` or `bun run test:env` - Launcher/plugin: `bun run test:launcher` or `bun run test:env`
- Runtime-compat / compiled behavior: `bun run test:runtime:compat` - Runtime-compat / compiled behavior: `bun run test:runtime:compat`
- Coverage for the maintained source lane: `bun run test:coverage:src`
- Deep/local full gate: default handoff gate above - Deep/local full gate: default handoff gate above
## Coverage Reporting
- `bun run test:coverage:src` runs the maintained `test:src` lane through a sharded coverage runner: one Bun coverage process per test file, then merged LCOV output.
- Machine-readable output lands at `coverage/test-src/lcov.info`.
- CI and release quality-gate runs upload that LCOV file as the `coverage-test-src` artifact.
## Rules ## Rules
- Capture exact failing command and error when verification breaks. - Capture exact failing command and error when verification breaks.

View File

@@ -52,6 +52,8 @@
"test:immersion:sqlite:dist": "bun test dist/core/services/immersion-tracker-service.test.js dist/core/services/immersion-tracker/storage-session.test.js", "test:immersion:sqlite:dist": "bun test dist/core/services/immersion-tracker-service.test.js dist/core/services/immersion-tracker/storage-session.test.js",
"test:immersion:sqlite": "bun run tsc && bun run test:immersion:sqlite:dist", "test:immersion:sqlite": "bun run tsc && bun run test:immersion:sqlite:dist",
"test:src": "bun scripts/run-test-lane.mjs bun-src-full", "test:src": "bun scripts/run-test-lane.mjs bun-src-full",
"test:coverage:src": "bun run scripts/run-coverage-lane.ts bun-src-full --coverage-dir coverage/test-src",
"test:coverage:subtitle:src": "bun test --coverage --coverage-reporter=text --coverage-reporter=lcov --coverage-dir coverage/test-subtitle src/core/services/subsync.test.ts src/subsync/utils.test.ts",
"test:launcher:unit:src": "bun scripts/run-test-lane.mjs bun-launcher-unit", "test:launcher:unit:src": "bun scripts/run-test-lane.mjs bun-launcher-unit",
"test:launcher:env:src": "bun run test:launcher:smoke:src && bun run test:plugin:src", "test:launcher:env:src": "bun run test:launcher:smoke:src && bun run test:plugin:src",
"test:env": "bun run test:launcher:env:src && bun run test:immersion:sqlite:src", "test:env": "bun run test:launcher:env:src && bun run test:immersion:sqlite:src",
@@ -63,7 +65,7 @@
"test:launcher": "bun run test:launcher:src", "test:launcher": "bun run test:launcher:src",
"test:core": "bun run test:core:src", "test:core": "bun run test:core:src",
"test:subtitle": "bun run test:subtitle:src", "test:subtitle": "bun run test:subtitle:src",
"test:fast": "bun run test:config:src && bun run test:core:src && bun run test:docs:kb && bun test src/main-entry-runtime.test.ts src/anki-integration.test.ts src/anki-integration/anki-connect-proxy.test.ts src/anki-integration/field-grouping-workflow.test.ts src/anki-integration/field-grouping.test.ts src/anki-integration/field-grouping-merge.test.ts src/release-workflow.test.ts src/ci-workflow.test.ts scripts/build-changelog.test.ts scripts/mkv-to-readme-video.test.ts scripts/update-aur-package.test.ts && bun test src/core/services/immersion-tracker/__tests__/query.test.ts src/core/services/immersion-tracker/__tests__/query-split-modules.test.ts && bun run tsc && bun test dist/main/runtime/registry.test.js", "test:fast": "bun run test:config:src && bun run test:core:src && bun run test:docs:kb && bun test src/main-entry-runtime.test.ts src/anki-integration.test.ts src/anki-integration/anki-connect-proxy.test.ts src/anki-integration/field-grouping-workflow.test.ts src/anki-integration/field-grouping.test.ts src/anki-integration/field-grouping-merge.test.ts src/release-workflow.test.ts src/ci-workflow.test.ts scripts/build-changelog.test.ts scripts/mkv-to-readme-video.test.ts scripts/run-coverage-lane.test.ts scripts/update-aur-package.test.ts && bun test src/core/services/immersion-tracker/__tests__/query.test.ts src/core/services/immersion-tracker/__tests__/query-split-modules.test.ts && bun run tsc && bun test dist/main/runtime/registry.test.js",
"generate:config-example": "bun run src/generate-config-example.ts", "generate:config-example": "bun run src/generate-config-example.ts",
"verify:config-example": "bun run src/verify-config-example.ts", "verify:config-example": "bun run src/verify-config-example.ts",
"start": "bun run build && electron . --start", "start": "bun run build && electron . --start",

View File

@@ -0,0 +1,61 @@
import assert from 'node:assert/strict';
import test from 'node:test';
import { mergeLcovReports } from './run-coverage-lane';
// Two shards cover the same source file: the merge must sum DA/FNDA/BRDA
// counters, union FN declarations, and recompute the summary totals
// (FNF/FNH, LF/LH, BRF/BRH) from the merged counters.
test('mergeLcovReports combines duplicate source-file counters across shard outputs', () => {
  const merged = mergeLcovReports([
    // Shard 1: alpha hit once, line 11 unexecuted, second branch never evaluated ('-').
    [
      'SF:src/example.ts',
      'FN:10,alpha',
      'FNDA:1,alpha',
      'DA:10,1',
      'DA:11,0',
      'BRDA:10,0,0,1',
      'BRDA:10,0,1,-',
      'end_of_record',
      '',
    ].join('\n'),
    // Shard 2: alpha hit twice more, beta appears, both branches have counts.
    [
      'SF:src/example.ts',
      'FN:10,alpha',
      'FN:20,beta',
      'FNDA:2,alpha',
      'FNDA:1,beta',
      'DA:10,2',
      'DA:11,1',
      'DA:20,1',
      'BRDA:10,0,0,0',
      'BRDA:10,0,1,1',
      'end_of_record',
      '',
    ].join('\n'),
  ]);
  assert.match(merged, /SF:src\/example\.ts/);
  // FN entries unioned across shards.
  assert.match(merged, /FN:10,alpha/);
  assert.match(merged, /FN:20,beta/);
  // FNDA hit counts summed (1 + 2 for alpha).
  assert.match(merged, /FNDA:3,alpha/);
  assert.match(merged, /FNDA:1,beta/);
  // Function totals recomputed: 2 defined, 2 hit.
  assert.match(merged, /FNF:2/);
  assert.match(merged, /FNH:2/);
  // Line counts summed per line number.
  assert.match(merged, /DA:10,3/);
  assert.match(merged, /DA:11,1/);
  assert.match(merged, /DA:20,1/);
  // Line totals recomputed: 3 instrumented, 3 hit (line 11 became 0 + 1).
  assert.match(merged, /LF:3/);
  assert.match(merged, /LH:3/);
  // Branch counts summed; the '-' entry must not clobber shard 2's count.
  assert.match(merged, /BRDA:10,0,0,1/);
  assert.match(merged, /BRDA:10,0,1,1/);
  assert.match(merged, /BRF:2/);
  assert.match(merged, /BRH:2/);
});
// Records for different source files must remain separate in the merged output.
test('mergeLcovReports keeps distinct source files as separate records', () => {
  const shardA = ['SF:src/a.ts', 'DA:1,1', 'end_of_record', ''].join('\n');
  const shardB = ['SF:src/b.ts', 'DA:2,1', 'end_of_record', ''].join('\n');
  const merged = mergeLcovReports([shardA, shardB]);
  assert.match(merged, /SF:src\/a\.ts[\s\S]*end_of_record/);
  assert.match(merged, /SF:src\/b\.ts[\s\S]*end_of_record/);
});

View File

@@ -0,0 +1,296 @@
import { existsSync, mkdirSync, readFileSync, readdirSync, rmSync, writeFileSync } from 'node:fs';
import { spawnSync } from 'node:child_process';
import { fileURLToPath } from 'node:url';
import { join, relative, resolve } from 'node:path';
// Describes one coverage lane: which source roots to scan, which filename
// suffixes count as test files, and which repo-relative paths to skip.
type LaneConfig = {
  roots: string[];
  include: string[];
  exclude: Set<string>;
};
// One parsed LCOV record (the SF: ... end_of_record span for a single source
// file). Counters are keyed by name/line so duplicate records from different
// coverage shards can be summed together.
type LcovRecord = {
  sourceFile: string;
  // function name -> declaration line (FN:)
  functions: Map<string, number>;
  // function name -> accumulated hit count (FNDA:)
  functionHits: Map<string, number>;
  // line number -> accumulated execution count (DA:)
  lines: Map<number, number>;
  // "line:block:branch" key -> branch data (BRDA:); hits === null encodes '-',
  // i.e. the branch expression was never evaluated
  branches: Map<string, { line: number; block: string; branch: string; hits: number | null }>;
};
// Absolute repository root; this script lives in <root>/scripts.
const repoRoot = resolve(fileURLToPath(new URL('..', import.meta.url)));
// Maintained coverage lanes. The exclude lists name test files that are kept
// out of the sharded coverage run (presumably suites that need a different
// runner or environment — TODO confirm against the plain test-lane config).
const lanes: Record<string, LaneConfig> = {
  'bun-src-full': {
    roots: ['src'],
    include: ['.test.ts', '.type-test.ts'],
    exclude: new Set([
      'src/core/services/anki-jimaku-ipc.test.ts',
      'src/core/services/ipc.test.ts',
      'src/core/services/overlay-manager.test.ts',
      'src/main/config-validation.test.ts',
      'src/main/runtime/registry.test.ts',
      'src/main/runtime/startup-config.test.ts',
    ]),
  },
  'bun-launcher-unit': {
    roots: ['launcher'],
    include: ['.test.ts'],
    exclude: new Set(['launcher/smoke.e2e.test.ts']),
  },
};
/**
 * Walk `rootDir` (relative to the repo root) and return the sorted list of
 * repo-relative file paths that end in one of `includeSuffixes` and are not
 * listed in `excludeSet`. Paths are normalized to forward slashes.
 */
function collectFiles(rootDir: string, includeSuffixes: string[], excludeSet: Set<string>): string[] {
  const matches: string[] = [];
  const pending: string[] = [resolve(repoRoot, rootDir)];
  while (pending.length > 0) {
    const directory = pending.pop()!;
    for (const entry of readdirSync(directory, { withFileTypes: true })) {
      const absolutePath = resolve(directory, entry.name);
      if (entry.isDirectory()) {
        pending.push(absolutePath);
        continue;
      }
      // Normalize to forward slashes so exclude/include checks work on Windows too.
      const repoRelative = relative(repoRoot, absolutePath).replaceAll('\\', '/');
      if (excludeSet.has(repoRelative)) continue;
      const hasWantedSuffix = includeSuffixes.some((suffix) => repoRelative.endsWith(suffix));
      if (hasWantedSuffix) {
        matches.push(repoRelative);
      }
    }
  }
  matches.sort();
  return matches;
}
/**
 * Resolve the sorted test-file list for a named lane.
 * @throws when the lane name is unknown or the lane matches no files.
 */
function getLaneFiles(laneName: string): string[] {
  const lane = lanes[laneName];
  if (lane === undefined) {
    throw new Error(`Unknown coverage lane: ${laneName}`);
  }
  const collected: string[] = [];
  for (const rootDir of lane.roots) {
    collected.push(...collectFiles(rootDir, lane.include, lane.exclude));
  }
  if (collected.length === 0) {
    throw new Error(`No test files found for coverage lane: ${laneName}`);
  }
  return collected;
}
/**
 * Extract the coverage output directory from CLI arguments.
 *
 * Accepts both the original space-separated form (`--coverage-dir <path>`)
 * and the conventional inline form (`--coverage-dir=<path>`).
 * Falls back to 'coverage' when the flag is absent.
 * @throws when the flag is present without a value.
 */
function parseCoverageDirArg(argv: string[]): string {
  for (let index = 0; index < argv.length; index += 1) {
    const arg = argv[index] ?? '';
    if (arg === '--coverage-dir') {
      const next = argv[index + 1];
      if (!next) {
        throw new Error('Missing value for --coverage-dir');
      }
      return next;
    }
    if (arg.startsWith('--coverage-dir=')) {
      const value = arg.slice('--coverage-dir='.length);
      if (!value) {
        throw new Error('Missing value for --coverage-dir');
      }
      return value;
    }
  }
  return 'coverage';
}
/**
 * Parse a raw LCOV report into per-source-file records.
 *
 * Only SF/FN/FNDA/DA/BRDA/end_of_record lines are consumed; TN and summary
 * lines (FNF/FNH/LF/LH/BRF/BRH) are skipped because the merge step recomputes
 * totals from the raw counters.
 * @throws when counter data appears before any SF line.
 */
function parseLcovReport(report: string): LcovRecord[] {
  const parsed: LcovRecord[] = [];
  let active: LcovRecord | null = null;
  const requireActive = (): LcovRecord => {
    if (active === null) {
      throw new Error('Malformed lcov report: record data before SF');
    }
    return active;
  };
  for (const rawLine of report.split(/\r?\n/)) {
    const line = rawLine.trim();
    if (line.length === 0 || line.startsWith('TN:')) {
      continue;
    }
    if (line.startsWith('SF:')) {
      active = {
        sourceFile: line.slice(3),
        functions: new Map(),
        functionHits: new Map(),
        lines: new Map(),
        branches: new Map(),
      };
    } else if (line === 'end_of_record') {
      if (active !== null) {
        parsed.push(active);
        active = null;
      }
    } else if (line.startsWith('FN:')) {
      // Function names may contain commas; only the first field is the line number.
      const fields = line.slice(3).split(',');
      requireActive().functions.set(fields.slice(1).join(','), Number(fields[0]));
    } else if (line.startsWith('FNDA:')) {
      const fields = line.slice(5).split(',');
      requireActive().functionHits.set(fields.slice(1).join(','), Number(fields[0]));
    } else if (line.startsWith('DA:')) {
      const [lineNumber, hits] = line.slice(3).split(',');
      requireActive().lines.set(Number(lineNumber), Number(hits));
    } else if (line.startsWith('BRDA:')) {
      const [lineNumber, block, branch, hits] = line.slice(5).split(',');
      requireActive().branches.set(`${lineNumber}:${block}:${branch}`, {
        line: Number(lineNumber),
        block,
        branch,
        // '-' means the branch expression was never evaluated.
        hits: hits === '-' ? null : Number(hits),
      });
    }
  }
  // Tolerate a report that is missing its final end_of_record.
  if (active !== null) {
    parsed.push(active);
  }
  return parsed;
}
/**
 * Merge multiple LCOV reports (one per coverage shard) into a single report.
 *
 * Records for the same source file are combined: DA/FNDA/BRDA counters are
 * summed, FN declarations are unioned, and the summary totals (FNF/FNH,
 * BRF/BRH, LF/LH) are recomputed from the merged counters. Distinct source
 * files stay separate records, emitted in sorted order.
 *
 * @param reports raw LCOV report texts, one per shard.
 * @returns merged LCOV text ending in a newline, or '' when no records exist.
 */
export function mergeLcovReports(reports: string[]): string {
  const bySourceFile = new Map<string, LcovRecord>();
  for (const report of reports) {
    for (const record of parseLcovReport(report)) {
      accumulateLcovRecord(bySourceFile, record);
    }
  }
  const chunks: string[] = [];
  for (const sourceFile of [...bySourceFile.keys()].sort()) {
    renderLcovRecord(chunks, bySourceFile.get(sourceFile)!);
  }
  return chunks.length > 0 ? `${chunks.join('\n')}\n` : '';
}

// Fold one parsed record's counters into the merged record for its source
// file, creating the merged record on first sight.
function accumulateLcovRecord(bySourceFile: Map<string, LcovRecord>, record: LcovRecord): void {
  let target = bySourceFile.get(record.sourceFile);
  if (!target) {
    target = {
      sourceFile: record.sourceFile,
      functions: new Map(),
      functionHits: new Map(),
      lines: new Map(),
      branches: new Map(),
    };
    bySourceFile.set(record.sourceFile, target);
  }
  // The first shard to mention a function pins its declaration line.
  for (const [name, line] of record.functions) {
    if (!target.functions.has(name)) {
      target.functions.set(name, line);
    }
  }
  for (const [name, hits] of record.functionHits) {
    target.functionHits.set(name, (target.functionHits.get(name) ?? 0) + hits);
  }
  for (const [lineNumber, hits] of record.lines) {
    target.lines.set(lineNumber, (target.lines.get(lineNumber) ?? 0) + hits);
  }
  for (const [branchKey, branchRecord] of record.branches) {
    const existing = target.branches.get(branchKey);
    if (!existing) {
      target.branches.set(branchKey, { ...branchRecord });
      continue;
    }
    // A '-' (null) entry means the branch was never evaluated in this shard:
    // it contributes no hits and must not downgrade a numeric counter.
    if (branchRecord.hits === null) {
      continue;
    }
    existing.hits = (existing.hits ?? 0) + branchRecord.hits;
  }
}

// Serialize one merged record in the shard output's section order:
// SF, FN/FNDA + FNF/FNH, BRDA + BRF/BRH, DA + LF/LH, end_of_record.
function renderLcovRecord(chunks: string[], record: LcovRecord): void {
  chunks.push(`SF:${record.sourceFile}`);
  // Functions ordered by declaration line, then name, for stable output.
  const functions = [...record.functions.entries()].sort((a, b) =>
    a[1] === b[1] ? a[0].localeCompare(b[0]) : a[1] - b[1],
  );
  for (const [name, line] of functions) {
    chunks.push(`FN:${line},${name}`);
  }
  for (const [name] of functions) {
    chunks.push(`FNDA:${record.functionHits.get(name) ?? 0},${name}`);
  }
  chunks.push(`FNF:${functions.length}`);
  chunks.push(`FNH:${functions.filter(([name]) => (record.functionHits.get(name) ?? 0) > 0).length}`);
  const branches = [...record.branches.values()].sort((a, b) =>
    a.line === b.line
      ? a.block === b.block
        ? a.branch.localeCompare(b.branch)
        : a.block.localeCompare(b.block)
      : a.line - b.line,
  );
  for (const branch of branches) {
    chunks.push(
      `BRDA:${branch.line},${branch.block},${branch.branch},${branch.hits === null ? '-' : branch.hits}`,
    );
  }
  chunks.push(`BRF:${branches.length}`);
  chunks.push(`BRH:${branches.filter((branch) => (branch.hits ?? 0) > 0).length}`);
  const lines = [...record.lines.entries()].sort((a, b) => a[0] - b[0]);
  for (const [lineNumber, hits] of lines) {
    chunks.push(`DA:${lineNumber},${hits}`);
  }
  chunks.push(`LF:${lines.length}`);
  chunks.push(`LH:${lines.filter(([, hits]) => hits > 0).length}`);
  chunks.push('end_of_record');
}
function runCoverageLane(): number {
const laneName = process.argv[2];
if (!laneName) {
process.stderr.write('Missing coverage lane name\n');
return 1;
}
const coverageDir = resolve(repoRoot, parseCoverageDirArg(process.argv.slice(3)));
const shardRoot = join(coverageDir, '.shards');
mkdirSync(coverageDir, { recursive: true });
rmSync(shardRoot, { recursive: true, force: true });
mkdirSync(shardRoot, { recursive: true });
const files = getLaneFiles(laneName);
const reports: string[] = [];
for (const [index, file] of files.entries()) {
const shardDir = join(shardRoot, `${String(index + 1).padStart(3, '0')}`);
const result = spawnSync(
'bun',
['test', '--coverage', '--coverage-reporter=lcov', '--coverage-dir', shardDir, `./${file}`],
{
cwd: repoRoot,
stdio: 'inherit',
},
);
if (result.error) {
throw result.error;
}
if ((result.status ?? 1) !== 0) {
return result.status ?? 1;
}
const lcovPath = join(shardDir, 'lcov.info');
if (!existsSync(lcovPath)) {
process.stdout.write(`Skipping empty coverage shard for ${file}\n`);
continue;
}
reports.push(readFileSync(lcovPath, 'utf8'));
}
writeFileSync(join(coverageDir, 'lcov.info'), mergeLcovReports(reports), 'utf8');
rmSync(shardRoot, { recursive: true, force: true });
process.stdout.write(`Merged LCOV written to ${relative(repoRoot, join(coverageDir, 'lcov.info'))}\n`);
return 0;
}
if (import.meta.main) {
process.exit(runCoverageLane());
}

View File

@@ -85,13 +85,15 @@ test('KnownWordCacheManager startLifecycle keeps fresh persisted cache without i
}, },
}; };
const { manager, calls, statePath, cleanup } = createKnownWordCacheHarness(config); const { manager, calls, statePath, cleanup } = createKnownWordCacheHarness(config);
const originalDateNow = Date.now;
try { try {
Date.now = () => 120_000;
fs.writeFileSync( fs.writeFileSync(
statePath, statePath,
JSON.stringify({ JSON.stringify({
version: 2, version: 2,
refreshedAtMs: Date.now(), refreshedAtMs: 120_000,
scope: '{"refreshMinutes":60,"scope":"is:note","fieldsWord":""}', scope: '{"refreshMinutes":60,"scope":"is:note","fieldsWord":""}',
words: ['猫'], words: ['猫'],
notes: { notes: {
@@ -102,12 +104,20 @@ test('KnownWordCacheManager startLifecycle keeps fresh persisted cache without i
); );
manager.startLifecycle(); manager.startLifecycle();
await new Promise((resolve) => setTimeout(resolve, 25));
assert.equal(manager.isKnownWord('猫'), true); assert.equal(manager.isKnownWord('猫'), true);
assert.equal(calls.findNotes, 0); assert.equal(calls.findNotes, 0);
assert.equal(calls.notesInfo, 0); assert.equal(calls.notesInfo, 0);
assert.equal(
(
manager as unknown as {
getMsUntilNextRefresh: () => number;
}
).getMsUntilNextRefresh() > 0,
true,
);
} finally { } finally {
Date.now = originalDateNow;
manager.stopLifecycle(); manager.stopLifecycle();
cleanup(); cleanup();
} }
@@ -124,13 +134,15 @@ test('KnownWordCacheManager startLifecycle immediately refreshes stale persisted
}, },
}; };
const { manager, calls, statePath, clientState, cleanup } = createKnownWordCacheHarness(config); const { manager, calls, statePath, clientState, cleanup } = createKnownWordCacheHarness(config);
const originalDateNow = Date.now;
try { try {
Date.now = () => 120_000;
fs.writeFileSync( fs.writeFileSync(
statePath, statePath,
JSON.stringify({ JSON.stringify({
version: 2, version: 2,
refreshedAtMs: Date.now() - 61_000, refreshedAtMs: 59_000,
scope: '{"refreshMinutes":1,"scope":"is:note","fieldsWord":"Word"}', scope: '{"refreshMinutes":1,"scope":"is:note","fieldsWord":"Word"}',
words: ['猫'], words: ['猫'],
notes: { notes: {
@@ -156,6 +168,7 @@ test('KnownWordCacheManager startLifecycle immediately refreshes stale persisted
assert.equal(manager.isKnownWord('猫'), false); assert.equal(manager.isKnownWord('猫'), false);
assert.equal(manager.isKnownWord('犬'), true); assert.equal(manager.isKnownWord('犬'), true);
} finally { } finally {
Date.now = originalDateNow;
manager.stopLifecycle(); manager.stopLifecycle();
cleanup(); cleanup();
} }

View File

@@ -4,12 +4,15 @@ import test from 'node:test';
import { PollingRunner } from './polling'; import { PollingRunner } from './polling';
test('polling runner records newly added cards after initialization', async () => { test('polling runner records newly added cards after initialization', async () => {
const originalDateNow = Date.now;
const recordedCards: number[] = []; const recordedCards: number[] = [];
let tracked = new Set<number>(); let tracked = new Set<number>();
const responses = [ const responses = [
[10, 11], [10, 11],
[10, 11, 12, 13], [10, 11, 12, 13],
]; ];
try {
Date.now = () => 120_000;
const runner = new PollingRunner({ const runner = new PollingRunner({
getDeck: () => 'Mining', getDeck: () => 'Mining',
getPollingRate: () => 250, getPollingRate: () => 250,
@@ -35,4 +38,7 @@ test('polling runner records newly added cards after initialization', async () =
await runner.pollOnce(); await runner.pollOnce();
assert.deepEqual(recordedCards, [2]); assert.deepEqual(recordedCards, [2]);
} finally {
Date.now = originalDateNow;
}
}); });

View File

@@ -5,6 +5,10 @@ import { resolve } from 'node:path';
const ciWorkflowPath = resolve(__dirname, '../.github/workflows/ci.yml'); const ciWorkflowPath = resolve(__dirname, '../.github/workflows/ci.yml');
const ciWorkflow = readFileSync(ciWorkflowPath, 'utf8'); const ciWorkflow = readFileSync(ciWorkflowPath, 'utf8');
const packageJsonPath = resolve(__dirname, '../package.json');
const packageJson = JSON.parse(readFileSync(packageJsonPath, 'utf8')) as {
scripts: Record<string, string>;
};
test('ci workflow lints changelog fragments', () => { test('ci workflow lints changelog fragments', () => {
assert.match(ciWorkflow, /bun run changelog:lint/); assert.match(ciWorkflow, /bun run changelog:lint/);
@@ -18,3 +22,17 @@ test('ci workflow checks pull requests for required changelog fragments', () =>
test('ci workflow verifies generated config examples stay in sync', () => { test('ci workflow verifies generated config examples stay in sync', () => {
assert.match(ciWorkflow, /bun run verify:config-example/); assert.match(ciWorkflow, /bun run verify:config-example/);
}); });
test('package scripts expose a sharded maintained source coverage lane with lcov output', () => {
assert.equal(
packageJson.scripts['test:coverage:src'],
'bun run scripts/run-coverage-lane.ts bun-src-full --coverage-dir coverage/test-src',
);
});
test('ci workflow runs the maintained source coverage lane and uploads lcov output', () => {
assert.match(ciWorkflow, /name: Coverage suite \(maintained source lane\)/);
assert.match(ciWorkflow, /run: bun run test:coverage:src/);
assert.match(ciWorkflow, /name: Upload coverage artifact/);
assert.match(ciWorkflow, /path: coverage\/test-src\/lcov\.info/);
});

View File

@@ -4,7 +4,7 @@ import * as fs from 'fs';
import * as os from 'os'; import * as os from 'os';
import * as path from 'path'; import * as path from 'path';
import { ConfigService, ConfigStartupParseError } from './service'; import { ConfigService, ConfigStartupParseError } from './service';
import { DEFAULT_CONFIG, RUNTIME_OPTION_REGISTRY } from './definitions'; import { DEFAULT_CONFIG, RUNTIME_OPTION_REGISTRY, deepMergeRawConfig } from './definitions';
import { generateConfigTemplate } from './template'; import { generateConfigTemplate } from './template';
function makeTempDir(): string { function makeTempDir(): string {
@@ -1032,6 +1032,61 @@ test('reloadConfigStrict parse failure does not mutate raw config or warnings',
assert.deepEqual(service.getWarnings(), beforeWarnings); assert.deepEqual(service.getWarnings(), beforeWarnings);
}); });
test('SM-012 config paths do not use JSON serialize-clone helpers', () => {
const definitionsSource = fs.readFileSync(
path.join(process.cwd(), 'src/config/definitions.ts'),
'utf-8',
);
const serviceSource = fs.readFileSync(path.join(process.cwd(), 'src/config/service.ts'), 'utf-8');
assert.equal(definitionsSource.includes('JSON.parse(JSON.stringify('), false);
assert.equal(serviceSource.includes('JSON.parse(JSON.stringify('), false);
});
test('getRawConfig returns a detached clone', () => {
const dir = makeTempDir();
fs.writeFileSync(
path.join(dir, 'config.jsonc'),
`{
"ankiConnect": {
"tags": ["SubMiner"]
}
}`,
'utf-8',
);
const service = new ConfigService(dir);
const raw = service.getRawConfig();
raw.ankiConnect!.tags!.push('mutated');
assert.deepEqual(service.getRawConfig().ankiConnect?.tags, ['SubMiner']);
});
test('deepMergeRawConfig returns a detached merged clone', () => {
const base = {
ankiConnect: {
tags: ['SubMiner'],
behavior: {
autoUpdateNewCards: true,
},
},
};
const merged = deepMergeRawConfig(base, {
ankiConnect: {
behavior: {
autoUpdateNewCards: false,
},
},
});
merged.ankiConnect!.tags!.push('mutated');
merged.ankiConnect!.behavior!.autoUpdateNewCards = true;
assert.deepEqual(base.ankiConnect?.tags, ['SubMiner']);
assert.equal(base.ankiConnect?.behavior?.autoUpdateNewCards, true);
});
test('warning emission order is deterministic across reloads', () => { test('warning emission order is deterministic across reloads', () => {
const dir = makeTempDir(); const dir = makeTempDir();
const configPath = path.join(dir, 'config.jsonc'); const configPath = path.join(dir, 'config.jsonc');

View File

@@ -84,11 +84,11 @@ export const CONFIG_OPTION_REGISTRY = [
export { CONFIG_TEMPLATE_SECTIONS }; export { CONFIG_TEMPLATE_SECTIONS };
export function deepCloneConfig(config: ResolvedConfig): ResolvedConfig { export function deepCloneConfig(config: ResolvedConfig): ResolvedConfig {
return JSON.parse(JSON.stringify(config)) as ResolvedConfig; return structuredClone(config);
} }
export function deepMergeRawConfig(base: RawConfig, patch: RawConfig): RawConfig { export function deepMergeRawConfig(base: RawConfig, patch: RawConfig): RawConfig {
const clone = JSON.parse(JSON.stringify(base)) as Record<string, unknown>; const clone = structuredClone(base) as Record<string, unknown>;
const patchObject = patch as Record<string, unknown>; const patchObject = patch as Record<string, unknown>;
const mergeInto = (target: Record<string, unknown>, source: Record<string, unknown>): void => { const mergeInto = (target: Record<string, unknown>, source: Record<string, unknown>): void => {

View File

@@ -61,7 +61,7 @@ export class ConfigService {
} }
getRawConfig(): RawConfig { getRawConfig(): RawConfig {
return JSON.parse(JSON.stringify(this.rawConfig)) as RawConfig; return structuredClone(this.rawConfig);
} }
getWarnings(): ConfigValidationWarning[] { getWarnings(): ConfigValidationWarning[] {

View File

@@ -8,7 +8,6 @@ import {
pruneRawRetention, pruneRawRetention,
pruneRollupRetention, pruneRollupRetention,
runOptimizeMaintenance, runOptimizeMaintenance,
toMonthKey,
} from './maintenance'; } from './maintenance';
import { ensureSchema } from './storage'; import { ensureSchema } from './storage';
@@ -31,9 +30,9 @@ test('pruneRawRetention uses session retention separately from telemetry retenti
try { try {
ensureSchema(db); ensureSchema(db);
const nowMs = 90 * 86_400_000; const nowMs = 1_000_000_000;
const staleEndedAtMs = nowMs - 40 * 86_400_000; const staleEndedAtMs = nowMs - 400_000_000;
const keptEndedAtMs = nowMs - 5 * 86_400_000; const keptEndedAtMs = nowMs - 50_000_000;
db.exec(` db.exec(`
INSERT INTO imm_videos ( INSERT INTO imm_videos (
@@ -49,14 +48,14 @@ test('pruneRawRetention uses session retention separately from telemetry retenti
INSERT INTO imm_session_telemetry ( INSERT INTO imm_session_telemetry (
session_id, sample_ms, total_watched_ms, active_watched_ms, CREATED_DATE, LAST_UPDATE_DATE session_id, sample_ms, total_watched_ms, active_watched_ms, CREATED_DATE, LAST_UPDATE_DATE
) VALUES ) VALUES
(1, ${nowMs - 2 * 86_400_000}, 0, 0, ${nowMs}, ${nowMs}), (1, ${nowMs - 200_000_000}, 0, 0, ${nowMs}, ${nowMs}),
(2, ${nowMs - 12 * 60 * 60 * 1000}, 0, 0, ${nowMs}, ${nowMs}); (2, ${nowMs - 10_000_000}, 0, 0, ${nowMs}, ${nowMs});
`); `);
const result = pruneRawRetention(db, nowMs, { const result = pruneRawRetention(db, nowMs, {
eventsRetentionMs: 7 * 86_400_000, eventsRetentionMs: 120_000_000,
telemetryRetentionMs: 1 * 86_400_000, telemetryRetentionMs: 80_000_000,
sessionsRetentionMs: 30 * 86_400_000, sessionsRetentionMs: 300_000_000,
}); });
const remainingSessions = db const remainingSessions = db
@@ -88,9 +87,9 @@ test('raw retention keeps rollups and rollup retention prunes them separately',
try { try {
ensureSchema(db); ensureSchema(db);
const nowMs = Date.UTC(2026, 2, 16, 12, 0, 0, 0); const nowMs = 1_000_000_000;
const oldDay = Math.floor((nowMs - 90 * 86_400_000) / 86_400_000); const oldDay = Math.floor((nowMs - 200_000_000) / 86_400_000);
const oldMonth = toMonthKey(nowMs - 400 * 86_400_000); const oldMonth = 196912;
db.exec(` db.exec(`
INSERT INTO imm_videos ( INSERT INTO imm_videos (
@@ -101,12 +100,12 @@ test('raw retention keeps rollups and rollup retention prunes them separately',
INSERT INTO imm_sessions ( INSERT INTO imm_sessions (
session_id, session_uuid, video_id, started_at_ms, ended_at_ms, status, CREATED_DATE, LAST_UPDATE_DATE session_id, session_uuid, video_id, started_at_ms, ended_at_ms, status, CREATED_DATE, LAST_UPDATE_DATE
) VALUES ( ) VALUES (
1, 'session-1', 1, ${nowMs - 90 * 86_400_000}, ${nowMs - 90 * 86_400_000 + 1_000}, 2, ${nowMs}, ${nowMs} 1, 'session-1', 1, ${nowMs - 200_000_000}, ${nowMs - 199_999_000}, 2, ${nowMs}, ${nowMs}
); );
INSERT INTO imm_session_telemetry ( INSERT INTO imm_session_telemetry (
session_id, sample_ms, total_watched_ms, active_watched_ms, CREATED_DATE, LAST_UPDATE_DATE session_id, sample_ms, total_watched_ms, active_watched_ms, CREATED_DATE, LAST_UPDATE_DATE
) VALUES ( ) VALUES (
1, ${nowMs - 90 * 86_400_000}, 0, 0, ${nowMs}, ${nowMs} 1, ${nowMs - 200_000_000}, 0, 0, ${nowMs}, ${nowMs}
); );
INSERT INTO imm_daily_rollups ( INSERT INTO imm_daily_rollups (
rollup_day, video_id, total_sessions, total_active_min, total_lines_seen, rollup_day, video_id, total_sessions, total_active_min, total_lines_seen,
@@ -123,9 +122,9 @@ test('raw retention keeps rollups and rollup retention prunes them separately',
`); `);
pruneRawRetention(db, nowMs, { pruneRawRetention(db, nowMs, {
eventsRetentionMs: 7 * 86_400_000, eventsRetentionMs: 120_000_000,
telemetryRetentionMs: 30 * 86_400_000, telemetryRetentionMs: 120_000_000,
sessionsRetentionMs: 30 * 86_400_000, sessionsRetentionMs: 120_000_000,
}); });
const rollupsAfterRawPrune = db const rollupsAfterRawPrune = db
@@ -139,8 +138,8 @@ test('raw retention keeps rollups and rollup retention prunes them separately',
assert.equal(monthlyAfterRawPrune?.total, 1); assert.equal(monthlyAfterRawPrune?.total, 1);
const rollupPrune = pruneRollupRetention(db, nowMs, { const rollupPrune = pruneRollupRetention(db, nowMs, {
dailyRollupRetentionMs: 30 * 86_400_000, dailyRollupRetentionMs: 120_000_000,
monthlyRollupRetentionMs: 365 * 86_400_000, monthlyRollupRetentionMs: 1,
}); });
const rollupsAfterRollupPrune = db const rollupsAfterRollupPrune = db

View File

@@ -61,19 +61,19 @@ export function pruneRawRetention(
const sessionsCutoff = nowMs - policy.sessionsRetentionMs; const sessionsCutoff = nowMs - policy.sessionsRetentionMs;
const deletedSessionEvents = ( const deletedSessionEvents = (
db.prepare(`DELETE FROM imm_session_events WHERE ts_ms < ?`).run(eventCutoff) as { db.prepare(`DELETE FROM imm_session_events WHERE ts_ms < ?`).run(toDbMs(eventCutoff)) as {
changes: number; changes: number;
} }
).changes; ).changes;
const deletedTelemetryRows = ( const deletedTelemetryRows = (
db.prepare(`DELETE FROM imm_session_telemetry WHERE sample_ms < ?`).run(telemetryCutoff) as { db
changes: number; .prepare(`DELETE FROM imm_session_telemetry WHERE sample_ms < ?`)
} .run(toDbMs(telemetryCutoff)) as { changes: number }
).changes; ).changes;
const deletedEndedSessions = ( const deletedEndedSessions = (
db db
.prepare(`DELETE FROM imm_sessions WHERE ended_at_ms IS NOT NULL AND ended_at_ms < ?`) .prepare(`DELETE FROM imm_sessions WHERE ended_at_ms IS NOT NULL AND ended_at_ms < ?`)
.run(sessionsCutoff) as { changes: number } .run(toDbMs(sessionsCutoff)) as { changes: number }
).changes; ).changes;
return { return {

View File

@@ -263,7 +263,9 @@ test('reportProgress posts timeline payload and treats failure as non-fatal', as
audioStreamIndex: 1, audioStreamIndex: 1,
subtitleStreamIndex: 2, subtitleStreamIndex: 2,
}); });
const expectedPostedPayload = JSON.parse(JSON.stringify(expectedPayload)); const expectedPostedPayload = Object.fromEntries(
Object.entries(structuredClone(expectedPayload)).filter(([, value]) => value !== undefined),
);
const ok = await service.reportProgress({ const ok = await service.reportProgress({
itemId: 'movie-2', itemId: 'movie-2',

View File

@@ -1255,7 +1255,7 @@ test('dictionary settings helpers upsert and remove dictionary entries without r
const deps = createDeps(async (script) => { const deps = createDeps(async (script) => {
scripts.push(script); scripts.push(script);
if (script.includes('optionsGetFull')) { if (script.includes('optionsGetFull')) {
return JSON.parse(JSON.stringify(optionsFull)); return structuredClone(optionsFull);
} }
if (script.includes('setAllSettings')) { if (script.includes('setAllSettings')) {
return true; return true;

View File

@@ -1,8 +1,18 @@
import assert from 'node:assert/strict'; import assert from 'node:assert/strict';
import fs from 'node:fs';
import path from 'node:path';
import test from 'node:test'; import test from 'node:test';
import { applyControllerConfigUpdate } from './controller-config-update.js'; import { applyControllerConfigUpdate } from './controller-config-update.js';
test('SM-012 controller config update path does not use JSON serialize-clone helpers', () => {
const source = fs.readFileSync(
path.join(process.cwd(), 'src/main/controller-config-update.ts'),
'utf-8',
);
assert.equal(source.includes('JSON.parse(JSON.stringify('), false);
});
test('applyControllerConfigUpdate replaces binding descriptors instead of deep-merging them', () => { test('applyControllerConfigUpdate replaces binding descriptors instead of deep-merging them', () => {
const next = applyControllerConfigUpdate( const next = applyControllerConfigUpdate(
{ {
@@ -52,3 +62,16 @@ test('applyControllerConfigUpdate merges buttonIndices while replacing only upda
assert.deepEqual(next.bindings?.toggleLookup, { kind: 'button', buttonIndex: 0 }); assert.deepEqual(next.bindings?.toggleLookup, { kind: 'button', buttonIndex: 0 });
assert.deepEqual(next.bindings?.closeLookup, { kind: 'none' }); assert.deepEqual(next.bindings?.closeLookup, { kind: 'none' });
}); });
test('applyControllerConfigUpdate detaches updated binding values from the patch object', () => {
const update = {
bindings: {
toggleLookup: { kind: 'button' as const, buttonIndex: 7 },
},
};
const next = applyControllerConfigUpdate(undefined, update);
update.bindings.toggleLookup.buttonIndex = 99;
assert.deepEqual(next.bindings?.toggleLookup, { kind: 'button', buttonIndex: 7 });
});

View File

@@ -28,7 +28,7 @@ export function applyControllerConfigUpdate(
[keyof RawControllerBindings, RawControllerBindings[keyof RawControllerBindings] | undefined] [keyof RawControllerBindings, RawControllerBindings[keyof RawControllerBindings] | undefined]
>) { >) {
if (value === undefined) continue; if (value === undefined) continue;
(nextBindings as Record<string, unknown>)[key] = JSON.parse(JSON.stringify(value)); (nextBindings as Record<string, unknown>)[key] = structuredClone(value);
} }
nextController.bindings = nextBindings; nextController.bindings = nextBindings;

View File

@@ -21,7 +21,7 @@ test('process next anilist retry update main deps builder maps callbacks', async
now: () => 7, now: () => 7,
})(); })();
assert.deepEqual(deps.nextReady(), { key: 'k', title: 't', episode: 1 }); assert.deepEqual(deps.nextReady(), { key: 'k', title: 't', season: null, episode: 1 });
deps.refreshRetryQueueState(); deps.refreshRetryQueueState();
deps.setLastAttemptAt(1); deps.setLastAttemptAt(1);
deps.setLastError('x'); deps.setLastError('x');

View File

@@ -84,7 +84,10 @@ test('findAnilistSetupDeepLinkArgvUrl returns null when missing', () => {
}); });
test('consumeAnilistSetupCallbackUrl persists token and closes window for callback URL', () => { test('consumeAnilistSetupCallbackUrl persists token and closes window for callback URL', () => {
const originalDateNow = Date.now;
const events: string[] = []; const events: string[] = [];
try {
Date.now = () => 120_000;
const handled = consumeAnilistSetupCallbackUrl({ const handled = consumeAnilistSetupCallbackUrl({
rawUrl: 'https://anilist.subminer.moe/#access_token=saved-token', rawUrl: 'https://anilist.subminer.moe/#access_token=saved-token',
saveToken: (value: string) => events.push(`save:${value}`), saveToken: (value: string) => events.push(`save:${value}`),
@@ -105,10 +108,16 @@ test('consumeAnilistSetupCallbackUrl persists token and closes window for callba
'success', 'success',
'close', 'close',
]); ]);
} finally {
Date.now = originalDateNow;
}
}); });
test('consumeAnilistSetupCallbackUrl persists token for subminer deep link URL', () => { test('consumeAnilistSetupCallbackUrl persists token for subminer deep link URL', () => {
const originalDateNow = Date.now;
const events: string[] = []; const events: string[] = [];
try {
Date.now = () => 120_000;
const handled = consumeAnilistSetupCallbackUrl({ const handled = consumeAnilistSetupCallbackUrl({
rawUrl: 'subminer://anilist-setup?access_token=saved-token', rawUrl: 'subminer://anilist-setup?access_token=saved-token',
saveToken: (value: string) => events.push(`save:${value}`), saveToken: (value: string) => events.push(`save:${value}`),
@@ -129,6 +138,9 @@ test('consumeAnilistSetupCallbackUrl persists token for subminer deep link URL',
'success', 'success',
'close', 'close',
]); ]);
} finally {
Date.now = originalDateNow;
}
}); });
test('consumeAnilistSetupCallbackUrl ignores non-callback URLs', () => { test('consumeAnilistSetupCallbackUrl ignores non-callback URLs', () => {

View File

@@ -36,6 +36,13 @@ test('release workflow verifies generated config examples before packaging artif
assert.match(releaseWorkflow, /bun run verify:config-example/); assert.match(releaseWorkflow, /bun run verify:config-example/);
}); });
test('release quality gate runs the maintained source coverage lane and uploads lcov output', () => {
assert.match(releaseWorkflow, /name: Coverage suite \(maintained source lane\)/);
assert.match(releaseWorkflow, /run: bun run test:coverage:src/);
assert.match(releaseWorkflow, /name: Upload coverage artifact/);
assert.match(releaseWorkflow, /path: coverage\/test-src\/lcov\.info/);
});
test('release build jobs install and cache stats dependencies before packaging', () => { test('release build jobs install and cache stats dependencies before packaging', () => {
assert.match(releaseWorkflow, /build-linux:[\s\S]*stats\/node_modules/); assert.match(releaseWorkflow, /build-linux:[\s\S]*stats\/node_modules/);
assert.match(releaseWorkflow, /build-macos:[\s\S]*stats\/node_modules/); assert.match(releaseWorkflow, /build-macos:[\s\S]*stats\/node_modules/);

View File

@@ -0,0 +1,64 @@
import assert from 'node:assert/strict';
import fs from 'node:fs';
import path from 'node:path';
import test from 'node:test';
import { RuntimeOptionsManager } from './runtime-options';
test('SM-012 runtime options path does not use JSON serialize-clone helpers', () => {
const source = fs.readFileSync(path.join(process.cwd(), 'src/runtime-options.ts'), 'utf-8');
assert.equal(source.includes('JSON.parse(JSON.stringify('), false);
});
test('RuntimeOptionsManager returns detached effective Anki config copies', () => {
const baseConfig = {
deck: 'Mining',
note: 'Sentence',
tags: ['SubMiner'],
behavior: {
autoUpdateNewCards: true,
updateIntervalMs: 5000,
},
fieldMapping: {
sentence: 'Sentence',
meaning: 'Meaning',
audio: 'Audio',
image: 'Image',
context: 'Context',
source: 'Source',
definition: 'Definition',
sequence: 'Sequence',
contextSecondary: 'ContextSecondary',
contextTertiary: 'ContextTertiary',
primarySpelling: 'PrimarySpelling',
primaryReading: 'PrimaryReading',
wordSpelling: 'WordSpelling',
wordReading: 'WordReading',
},
duplicates: {
mode: 'note' as const,
scope: 'deck' as const,
allowedFields: [],
},
ai: {
enabled: false,
model: '',
systemPrompt: '',
},
};
const manager = new RuntimeOptionsManager(
() => structuredClone(baseConfig),
{
applyAnkiPatch: () => undefined,
onOptionsChanged: () => undefined,
},
);
const effective = manager.getEffectiveAnkiConnectConfig();
effective.tags!.push('mutated');
effective.behavior!.autoUpdateNewCards = false;
assert.deepEqual(baseConfig.tags, ['SubMiner']);
assert.equal(baseConfig.behavior.autoUpdateNewCards, true);
});

View File

@@ -29,7 +29,7 @@ import { RUNTIME_OPTION_REGISTRY, RuntimeOptionRegistryEntry } from './config';
type RuntimeOverrides = Record<string, unknown>; type RuntimeOverrides = Record<string, unknown>;
function deepClone<T>(value: T): T { function deepClone<T>(value: T): T {
return JSON.parse(JSON.stringify(value)) as T; return structuredClone(value);
} }
function getPathValue(source: Record<string, unknown>, path: string): unknown { function getPathValue(source: Record<string, unknown>, path: string): unknown {