feat: stabilize startup sync and overlay/runtime paths

This commit is contained in:
2026-03-17 00:48:55 -07:00
parent de574c04bd
commit 11710f20db
69 changed files with 5323 additions and 495 deletions

View File

@@ -159,6 +159,40 @@ test('stats command launches attached app command with response path', async ()
]);
});
// Verifies the non-cleanup stats flow stays attached to the app process:
// runStatsCommand must NOT resolve merely because the startup response
// arrived; it resolves only once the attached app process exits.
// NOTE(review): the test name suggests the command returns right after the
// startup response, but the assertions below verify the opposite (it is
// still pending at 5ms and resolves only after the 20ms fake exit) —
// consider renaming the test for clarity.
test('stats command returns after startup response even if app process stays running', async () => {
const context = createContext();
context.args.stats = true;
// Captures the argv forwarded to the attached app for later assertion.
const forwarded: string[][] = [];
// Simulated app process: exits with status 0, but only after 20ms.
const started = new Promise<number>((resolve) => setTimeout(() => resolve(0), 20));
const statsCommand = runStatsCommand(context, {
createTempDir: () => '/tmp/subminer-stats-test',
joinPath: (...parts) => parts.join('/'),
runAppCommandAttached: async (_appPath, appArgs) => {
forwarded.push(appArgs);
return started;
},
// Startup response is available immediately; the command must still wait
// for the app process above before resolving.
waitForStatsResponse: async () => ({ ok: true, url: 'http://127.0.0.1:5175' }),
removeDir: () => {},
});
// At 5ms the fake app (20ms) is still running, so the command must not have
// resolved yet — the race is expected to hit the timeout arm.
const result = await Promise.race([
statsCommand.then(() => 'resolved'),
new Promise<'timeout'>((resolve) => setTimeout(() => resolve('timeout'), 5)),
]);
assert.equal(result, 'timeout');
// Once the app exits, the command resolves successfully.
const final = await statsCommand;
assert.equal(final, true);
// Only the base stats flags are forwarded (no cleanup flags in this mode).
assert.deepEqual(forwarded, [
[
'--stats',
'--stats-response-path',
'/tmp/subminer-stats-test/response.json',
],
]);
});
test('stats cleanup command forwards cleanup vocab flags to the app', async () => {
const context = createContext();
context.args.stats = true;
@@ -189,6 +223,36 @@ test('stats cleanup command forwards cleanup vocab flags to the app', async () =
]);
});
// A cleanup run with the lifetime flag set must forward
// --stats-cleanup and --stats-cleanup-lifetime to the app.
test('stats cleanup command forwards lifetime rebuild flag to the app', async () => {
const context = createContext();
Object.assign(context.args, {
stats: true,
statsCleanup: true,
statsCleanupLifetime: true,
});
const captured: string[][] = [];
const outcome = await runStatsCommand(context, {
createTempDir: () => '/tmp/subminer-stats-test',
joinPath: (...parts) => parts.join('/'),
runAppCommandAttached: async (_appPath, appArgs) => {
captured.push(appArgs);
return 0;
},
waitForStatsResponse: async () => ({ ok: true }),
removeDir: () => {},
});
assert.equal(outcome, true);
// Exactly one launch, with the lifetime flag riding along with cleanup.
const expectedArgs = [
'--stats',
'--stats-response-path',
'/tmp/subminer-stats-test/response.json',
'--stats-cleanup',
'--stats-cleanup-lifetime',
];
assert.deepEqual(captured, [expectedArgs]);
});
test('stats command throws when stats response reports an error', async () => {
const context = createContext();
context.args.stats = true;
@@ -207,9 +271,11 @@ test('stats command throws when stats response reports an error', async () => {
}, /Immersion tracking is disabled in config\./);
});
test('stats command fails if attached app exits before startup response', async () => {
test('stats cleanup command fails if attached app exits before startup response', async () => {
const context = createContext();
context.args.stats = true;
context.args.statsCleanup = true;
context.args.statsCleanupVocab = true;
await assert.rejects(async () => {
await runStatsCommand(context, {

View File

@@ -24,13 +24,15 @@ type StatsCommandDeps = {
removeDir: (targetPath: string) => void;
};
const STATS_STARTUP_RESPONSE_TIMEOUT_MS = 8_000;
const defaultDeps: StatsCommandDeps = {
createTempDir: (prefix) => fs.mkdtempSync(path.join(os.tmpdir(), prefix)),
joinPath: (...parts) => path.join(...parts),
runAppCommandAttached: (appPath, appArgs, logLevel, label) =>
runAppCommandAttached(appPath, appArgs, logLevel, label),
waitForStatsResponse: async (responsePath) => {
const deadline = Date.now() + 8000;
const deadline = Date.now() + STATS_STARTUP_RESPONSE_TIMEOUT_MS;
while (Date.now() < deadline) {
try {
if (fs.existsSync(responsePath)) {
@@ -71,20 +73,46 @@ export async function runStatsCommand(
if (args.statsCleanupVocab) {
forwarded.push('--stats-cleanup-vocab');
}
if (args.statsCleanupLifetime) {
forwarded.push('--stats-cleanup-lifetime');
}
if (args.logLevel !== 'info') {
forwarded.push('--log-level', args.logLevel);
}
const attachedExitPromise = deps.runAppCommandAttached(
appPath,
forwarded,
args.logLevel,
'stats',
);
const attachedExitPromise = deps.runAppCommandAttached(appPath, forwarded, args.logLevel, 'stats');
if (!args.statsCleanup) {
const startupResult = await Promise.race([
deps
.waitForStatsResponse(responsePath)
.then((response) => ({ kind: 'response' as const, response })),
attachedExitPromise.then((status) => ({ kind: 'exit' as const, status })),
]);
if (startupResult.kind === 'exit') {
if (startupResult.status !== 0) {
throw new Error(
`Stats app exited before startup response (status ${startupResult.status}).`,
);
}
const response = await deps.waitForStatsResponse(responsePath);
if (!response.ok) {
throw new Error(response.error || 'Stats dashboard failed to start.');
}
return true;
}
if (!startupResult.response.ok) {
throw new Error(startupResult.response.error || 'Stats dashboard failed to start.');
}
await attachedExitPromise;
return true;
}
const attachedExitPromiseCleanup = attachedExitPromise;
const startupResult = await Promise.race([
deps
.waitForStatsResponse(responsePath)
.then((response) => ({ kind: 'response' as const, response })),
attachedExitPromise.then((status) => ({ kind: 'exit' as const, status })),
attachedExitPromiseCleanup.then((status) => ({ kind: 'exit' as const, status })),
]);
if (startupResult.kind === 'exit') {
if (startupResult.status !== 0) {
@@ -101,7 +129,7 @@ export async function runStatsCommand(
if (!startupResult.response.ok) {
throw new Error(startupResult.response.error || 'Stats dashboard failed to start.');
}
const exitStatus = await attachedExitPromise;
const exitStatus = await attachedExitPromiseCleanup;
if (exitStatus !== 0) {
throw new Error(`Stats app exited with status ${exitStatus}.`);
}

View File

@@ -125,6 +125,7 @@ export function createDefaultArgs(launcherConfig: LauncherYoutubeSubgenConfig):
stats: false,
statsCleanup: false,
statsCleanupVocab: false,
statsCleanupLifetime: false,
doctor: false,
configPath: false,
configShow: false,
@@ -194,6 +195,7 @@ export function applyInvocationsToArgs(parsed: Args, invocations: CliInvocations
if (invocations.statsTriggered) parsed.stats = true;
if (invocations.statsCleanup) parsed.statsCleanup = true;
if (invocations.statsCleanupVocab) parsed.statsCleanupVocab = true;
if (invocations.statsCleanupLifetime) parsed.statsCleanupLifetime = true;
if (invocations.dictionaryTarget) {
parsed.dictionaryTarget = parseDictionaryTarget(invocations.dictionaryTarget);
}

View File

@@ -43,6 +43,7 @@ export interface CliInvocations {
statsTriggered: boolean;
statsCleanup: boolean;
statsCleanupVocab: boolean;
statsCleanupLifetime: boolean;
statsLogLevel: string | null;
doctorTriggered: boolean;
doctorLogLevel: string | null;
@@ -145,6 +146,7 @@ export function parseCliPrograms(
let statsTriggered = false;
let statsCleanup = false;
let statsCleanupVocab = false;
let statsCleanupLifetime = false;
let statsLogLevel: string | null = null;
let doctorLogLevel: string | null = null;
let texthookerLogLevel: string | null = null;
@@ -253,14 +255,21 @@ export function parseCliPrograms(
commandProgram
.command('stats')
.description('Launch the local immersion stats dashboard')
.argument('[action]', 'cleanup')
.argument('[action]', 'cleanup|rebuild|backfill')
.option('-v, --vocab', 'Clean vocabulary rows in the stats database')
.option('-l, --lifetime', 'Rebuild lifetime summary rows from retained data')
.option('--log-level <level>', 'Log level')
.action((action: string | undefined, options: Record<string, unknown>) => {
statsTriggered = true;
if ((action || '').toLowerCase() === 'cleanup') {
const normalizedAction = (action || '').toLowerCase();
if (normalizedAction === 'cleanup') {
statsCleanup = true;
statsCleanupVocab = options.vocab !== false;
statsCleanupLifetime = options.lifetime === true;
statsCleanupVocab = statsCleanupLifetime ? false : options.vocab !== false;
} else if (normalizedAction === 'rebuild' || normalizedAction === 'backfill') {
statsCleanup = true;
statsCleanupLifetime = true;
statsCleanupVocab = false;
}
statsLogLevel = typeof options.logLevel === 'string' ? options.logLevel : null;
});
@@ -346,6 +355,7 @@ export function parseCliPrograms(
statsTriggered,
statsCleanup,
statsCleanupVocab,
statsCleanupLifetime,
statsLogLevel,
doctorTriggered,
doctorLogLevel,

View File

@@ -26,7 +26,9 @@ type RunResult = {
};
function withTempDir<T>(fn: (dir: string) => T): T {
const dir = fs.mkdtempSync(path.join(os.tmpdir(), 'subminer-launcher-test-'));
// Keep paths short on macOS/Linux: Unix domain sockets have small path-length limits.
const tmpBase = process.platform === 'win32' ? os.tmpdir() : '/tmp';
const dir = fs.mkdtempSync(path.join(tmpBase, 'subminer-launcher-test-'));
try {
return fn(dir);
} finally {
@@ -279,8 +281,8 @@ for arg in "$@"; do
;;
esac
done
${bunBinary} -e "const net=require('node:net'); const fs=require('node:fs'); const socket=process.argv[1]; try { fs.rmSync(socket,{force:true}); } catch {} const server=net.createServer((conn)=>conn.end()); server.listen(socket,()=>setTimeout(()=>server.close(()=>process.exit(0)),250));" "$socket_path"
`,
${bunBinary} -e "const net=require('node:net'); const fs=require('node:fs'); const path=require('node:path'); const socket=process.argv[1]||''; try{ if(socket) fs.mkdirSync(path.dirname(socket),{recursive:true}); }catch{} try{ if(socket) fs.rmSync(socket,{force:true}); }catch{} const server=net.createServer((c)=>c.end()); server.on('error',()=>process.exit(0)); if(!socket) process.exit(0); try{ server.listen(socket,()=>setTimeout(()=>server.close(()=>process.exit(0)),250)); } catch { process.exit(0); }" "$socket_path"
`,
'utf8',
);
fs.chmodSync(path.join(binDir, 'mpv'), 0o755);
@@ -391,6 +393,54 @@ exit 0
},
);
// Regression test: a dashboard app that is slow to write its startup
// response must still result in a successful `stats` invocation.
// NOTE(review): STATS_STARTUP_RESPONSE_TIMEOUT_MS is 8s, while this fake app
// sleeps 9s before writing the response — success here appears to depend on
// how the exit-status race / response re-read resolves in runStatsCommand;
// confirm this timing relationship is intended and not flaky.
test(
'stats command tolerates slower dashboard startup before timing out',
{ timeout: 20000 },
() => {
withTempDir((root) => {
const homeDir = path.join(root, 'home');
const xdgConfigHome = path.join(root, 'xdg');
const appPath = path.join(root, 'fake-subminer-slow.sh');
// Fake app script: parses --stats-response-path in both
// "--flag value" and "--flag=value" forms, sleeps 9s, then writes an
// ok response file and exits 0.
fs.writeFileSync(
appPath,
`#!/bin/sh
set -eu
response_path=""
prev=""
for arg in "$@"; do
if [ "$prev" = "--stats-response-path" ]; then
response_path="$arg"
prev=""
continue
fi
case "$arg" in
--stats-response-path=*)
response_path="\${arg#--stats-response-path=}"
;;
--stats-response-path)
prev="--stats-response-path"
;;
esac
done
sleep 9
mkdir -p "$(dirname "$response_path")"
printf '%s' '{"ok":true,"url":"http://127.0.0.1:5175"}' > "$response_path"
exit 0
`,
);
fs.chmodSync(appPath, 0o755);
// Point the launcher at the fake app via its env override.
const env = {
...makeTestEnv(homeDir, xdgConfigHome),
SUBMINER_APPIMAGE_PATH: appPath,
};
const result = runLauncher(['stats'], env);
assert.equal(result.status, 0, `stdout:\n${result.stdout}\nstderr:\n${result.stderr}`);
});
},
);
test('jellyfin discovery routes to app --background and remote announce with log-level forwarding', () => {
withTempDir((root) => {
const homeDir = path.join(root, 'home');

View File

@@ -81,3 +81,21 @@ test('parseArgs maps explicit stats cleanup vocab flag', () => {
assert.equal(parsed.statsCleanup, true);
assert.equal(parsed.statsCleanupVocab, true);
});
// `stats cleanup --lifetime` selects cleanup + lifetime and drops vocab.
test('parseArgs maps lifetime stats cleanup flag', () => {
const args = parseArgs(['stats', 'cleanup', '--lifetime'], 'subminer', {});
const expectations: Array<[boolean | undefined, boolean]> = [
[args.stats, true],
[args.statsCleanup, true],
[args.statsCleanupVocab, false],
[args.statsCleanupLifetime, true],
];
for (const [actual, expected] of expectations) {
assert.equal(actual, expected);
}
});
// `stats rebuild` is shorthand for a lifetime-only cleanup run.
test('parseArgs maps stats rebuild action to cleanup lifetime mode', () => {
const { stats, statsCleanup, statsCleanupVocab, statsCleanupLifetime } = parseArgs(
['stats', 'rebuild'],
'subminer',
{},
);
assert.equal(stats, true);
assert.equal(statsCleanup, true);
assert.equal(statsCleanupVocab, false);
assert.equal(statsCleanupLifetime, true);
});

View File

@@ -114,6 +114,7 @@ export interface Args {
stats: boolean;
statsCleanup?: boolean;
statsCleanupVocab?: boolean;
statsCleanupLifetime?: boolean;
dictionaryTarget?: string;
doctor: boolean;
configPath: boolean;

View File

@@ -147,12 +147,26 @@ test('hasExplicitCommand and shouldStartApp preserve command intent', () => {
'--stats',
'--stats-response-path',
'/tmp/subminer-stats-response.json',
'--stats-cleanup-lifetime',
]);
assert.equal(stats.stats, true);
assert.equal(stats.statsResponsePath, '/tmp/subminer-stats-response.json');
assert.equal(stats.statsCleanup, false);
assert.equal(stats.statsCleanupVocab, false);
assert.equal(stats.statsCleanupLifetime, true);
assert.equal(hasExplicitCommand(stats), true);
assert.equal(shouldStartApp(stats), true);
const statsLifetimeRebuild = parseArgs([
'--stats',
'--stats-cleanup',
'--stats-cleanup-lifetime',
]);
assert.equal(statsLifetimeRebuild.stats, true);
assert.equal(statsLifetimeRebuild.statsCleanup, true);
assert.equal(statsLifetimeRebuild.statsCleanupLifetime, true);
assert.equal(statsLifetimeRebuild.statsCleanupVocab, false);
const jellyfinLibraries = parseArgs(['--jellyfin-libraries']);
assert.equal(jellyfinLibraries.jellyfinLibraries, true);
assert.equal(hasExplicitCommand(jellyfinLibraries), true);

View File

@@ -32,6 +32,7 @@ export interface CliArgs {
stats: boolean;
statsCleanup?: boolean;
statsCleanupVocab?: boolean;
statsCleanupLifetime?: boolean;
statsResponsePath?: string;
jellyfin: boolean;
jellyfinLogin: boolean;
@@ -104,6 +105,7 @@ export function parseArgs(argv: string[]): CliArgs {
stats: false,
statsCleanup: false,
statsCleanupVocab: false,
statsCleanupLifetime: false,
jellyfin: false,
jellyfinLogin: false,
jellyfinLogout: false,
@@ -172,6 +174,7 @@ export function parseArgs(argv: string[]): CliArgs {
} else if (arg === '--stats') args.stats = true;
else if (arg === '--stats-cleanup') args.statsCleanup = true;
else if (arg === '--stats-cleanup-vocab') args.statsCleanupVocab = true;
else if (arg === '--stats-cleanup-lifetime') args.statsCleanupLifetime = true;
else if (arg.startsWith('--stats-response-path=')) {
const value = arg.split('=', 2)[1];
if (value) args.statsResponsePath = value;

View File

@@ -85,11 +85,17 @@ test('loads defaults when config is missing', () => {
assert.equal(config.immersionTracking.queueCap, 1000);
assert.equal(config.immersionTracking.payloadCapBytes, 256);
assert.equal(config.immersionTracking.maintenanceIntervalMs, 86_400_000);
assert.equal(config.immersionTracking.retention.eventsDays, 7);
assert.equal(config.immersionTracking.retention.telemetryDays, 30);
assert.equal(config.immersionTracking.retention.dailyRollupsDays, 365);
assert.equal(config.immersionTracking.retention.monthlyRollupsDays, 1825);
assert.equal(config.immersionTracking.retention.vacuumIntervalDays, 7);
assert.equal(config.immersionTracking.retention.eventsDays, 0);
assert.equal(config.immersionTracking.retention.telemetryDays, 0);
assert.equal(config.immersionTracking.retention.sessionsDays, 0);
assert.equal(config.immersionTracking.retention.dailyRollupsDays, 0);
assert.equal(config.immersionTracking.retention.monthlyRollupsDays, 0);
assert.equal(config.immersionTracking.retention.vacuumIntervalDays, 0);
assert.equal(config.immersionTracking.retentionMode, 'preset');
assert.equal(config.immersionTracking.retentionPreset, 'balanced');
assert.equal(config.immersionTracking.lifetimeSummaries?.global, true);
assert.equal(config.immersionTracking.lifetimeSummaries?.anime, true);
assert.equal(config.immersionTracking.lifetimeSummaries?.media, true);
});
test('throws actionable startup parse error for malformed config at construction time', () => {
@@ -742,12 +748,20 @@ test('accepts immersion tracking config values', () => {
"queueCap": 2000,
"payloadCapBytes": 512,
"maintenanceIntervalMs": 3600000,
"retentionMode": "preset",
"retentionPreset": "minimal",
"retention": {
"eventsDays": 14,
"telemetryDays": 45,
"sessionsDays": 60,
"dailyRollupsDays": 730,
"monthlyRollupsDays": 3650,
"vacuumIntervalDays": 14
},
"lifetimeSummaries": {
"global": false,
"anime": true,
"media": false
}
}
}`,
@@ -766,9 +780,15 @@ test('accepts immersion tracking config values', () => {
assert.equal(config.immersionTracking.maintenanceIntervalMs, 3_600_000);
assert.equal(config.immersionTracking.retention.eventsDays, 14);
assert.equal(config.immersionTracking.retention.telemetryDays, 45);
assert.equal(config.immersionTracking.retention.sessionsDays, 60);
assert.equal(config.immersionTracking.retention.dailyRollupsDays, 730);
assert.equal(config.immersionTracking.retention.monthlyRollupsDays, 3650);
assert.equal(config.immersionTracking.retention.vacuumIntervalDays, 14);
assert.equal(config.immersionTracking.retentionMode, 'preset');
assert.equal(config.immersionTracking.retentionPreset, 'minimal');
assert.equal(config.immersionTracking.lifetimeSummaries?.global, false);
assert.equal(config.immersionTracking.lifetimeSummaries?.anime, true);
assert.equal(config.immersionTracking.lifetimeSummaries?.media, false);
});
test('falls back for invalid immersion tracking tuning values', () => {
@@ -777,18 +797,22 @@ test('falls back for invalid immersion tracking tuning values', () => {
path.join(dir, 'config.jsonc'),
`{
"immersionTracking": {
"retentionMode": "bad",
"retentionPreset": "bad",
"batchSize": 0,
"flushIntervalMs": 1,
"queueCap": 5,
"payloadCapBytes": 16,
"maintenanceIntervalMs": 1000,
"retention": {
"eventsDays": 0,
"eventsDays": -1,
"telemetryDays": 99999,
"dailyRollupsDays": 0,
"sessionsDays": -1,
"dailyRollupsDays": -1,
"monthlyRollupsDays": 999999,
"vacuumIntervalDays": 0
}
"vacuumIntervalDays": -1
},
"lifetimeSummaries": "bad"
}
}`,
'utf-8',
@@ -803,11 +827,17 @@ test('falls back for invalid immersion tracking tuning values', () => {
assert.equal(config.immersionTracking.queueCap, 1000);
assert.equal(config.immersionTracking.payloadCapBytes, 256);
assert.equal(config.immersionTracking.maintenanceIntervalMs, 86_400_000);
assert.equal(config.immersionTracking.retention.eventsDays, 7);
assert.equal(config.immersionTracking.retention.telemetryDays, 30);
assert.equal(config.immersionTracking.retention.dailyRollupsDays, 365);
assert.equal(config.immersionTracking.retention.monthlyRollupsDays, 1825);
assert.equal(config.immersionTracking.retention.vacuumIntervalDays, 7);
assert.equal(config.immersionTracking.retention.eventsDays, 0);
assert.equal(config.immersionTracking.retention.telemetryDays, 0);
assert.equal(config.immersionTracking.retention.sessionsDays, 0);
assert.equal(config.immersionTracking.retention.dailyRollupsDays, 0);
assert.equal(config.immersionTracking.retention.monthlyRollupsDays, 0);
assert.equal(config.immersionTracking.retention.vacuumIntervalDays, 0);
assert.equal(config.immersionTracking.retentionMode, 'preset');
assert.equal(config.immersionTracking.retentionPreset, 'balanced');
assert.equal(config.immersionTracking.lifetimeSummaries?.global, true);
assert.equal(config.immersionTracking.lifetimeSummaries?.anime, true);
assert.equal(config.immersionTracking.lifetimeSummaries?.media, true);
assert.ok(warnings.some((warning) => warning.path === 'immersionTracking.batchSize'));
assert.ok(warnings.some((warning) => warning.path === 'immersionTracking.flushIntervalMs'));
@@ -818,6 +848,9 @@ test('falls back for invalid immersion tracking tuning values', () => {
assert.ok(
warnings.some((warning) => warning.path === 'immersionTracking.retention.telemetryDays'),
);
assert.ok(
warnings.some((warning) => warning.path === 'immersionTracking.retention.sessionsDays'),
);
assert.ok(
warnings.some((warning) => warning.path === 'immersionTracking.retention.dailyRollupsDays'),
);
@@ -827,6 +860,37 @@ test('falls back for invalid immersion tracking tuning values', () => {
assert.ok(
warnings.some((warning) => warning.path === 'immersionTracking.retention.vacuumIntervalDays'),
);
assert.ok(warnings.some((warning) => warning.path === 'immersionTracking.retentionMode'));
assert.ok(warnings.some((warning) => warning.path === 'immersionTracking.retentionPreset'));
assert.ok(warnings.some((warning) => warning.path === 'immersionTracking.lifetimeSummaries'));
});
// Preset "minimal" supplies the base retention values; explicit keys under
// `retention` override the preset, while untouched keys keep preset values.
test('applies retention presets and explicit overrides', () => {
const tempDir = makeTempDir();
const configBody = `{
"immersionTracking": {
"retentionMode": "preset",
"retentionPreset": "minimal",
"retention": {
"eventsDays": 11,
"sessionsDays": 8
}
}
}`;
fs.writeFileSync(path.join(tempDir, 'config.jsonc'), configBody, 'utf-8');
const resolved = new ConfigService(tempDir).getConfig();
const tracking = resolved.immersionTracking;
// Mode and preset are echoed back as configured.
assert.equal(tracking.retentionMode, 'preset');
assert.equal(tracking.retentionPreset, 'minimal');
// Explicit overrides win…
assert.equal(tracking.retention.eventsDays, 11);
assert.equal(tracking.retention.sessionsDays, 8);
// …and the remaining windows fall back to the minimal preset.
assert.equal(tracking.retention.telemetryDays, 14);
assert.equal(tracking.retention.dailyRollupsDays, 30);
});
test('parses jsonc and warns/falls back on invalid value', () => {

View File

@@ -9,12 +9,20 @@ export const IMMERSION_DEFAULT_CONFIG: Pick<ResolvedConfig, 'immersionTracking'>
queueCap: 1000,
payloadCapBytes: 256,
maintenanceIntervalMs: 24 * 60 * 60 * 1000,
retentionMode: 'preset',
retentionPreset: 'balanced',
retention: {
eventsDays: 7,
telemetryDays: 30,
dailyRollupsDays: 365,
monthlyRollupsDays: 5 * 365,
vacuumIntervalDays: 7,
eventsDays: 0,
telemetryDays: 0,
sessionsDays: 0,
dailyRollupsDays: 0,
monthlyRollupsDays: 0,
vacuumIntervalDays: 0,
},
lifetimeSummaries: {
global: true,
anime: true,
media: true,
},
},
};

View File

@@ -48,35 +48,73 @@ export function buildImmersionConfigOptionRegistry(
defaultValue: defaultConfig.immersionTracking.maintenanceIntervalMs,
description: 'Maintenance cadence (prune + rollup + vacuum checks).',
},
{
path: 'immersionTracking.retentionMode',
kind: 'string',
defaultValue: defaultConfig.immersionTracking.retentionMode,
description: 'Retention mode (`preset` uses preset values, `advanced` uses explicit values).',
enumValues: ['preset', 'advanced'],
},
{
path: 'immersionTracking.retentionPreset',
kind: 'string',
defaultValue: defaultConfig.immersionTracking.retentionPreset,
description: 'Retention preset when `retentionMode` is `preset`.',
enumValues: ['minimal', 'balanced', 'deep-history'],
},
{
path: 'immersionTracking.retention.eventsDays',
kind: 'number',
defaultValue: defaultConfig.immersionTracking.retention.eventsDays,
description: 'Raw event retention window in days.',
description: 'Raw event retention window in days. Use 0 to keep all.',
},
{
path: 'immersionTracking.retention.telemetryDays',
kind: 'number',
defaultValue: defaultConfig.immersionTracking.retention.telemetryDays,
description: 'Telemetry retention window in days.',
description: 'Telemetry retention window in days. Use 0 to keep all.',
},
{
path: 'immersionTracking.retention.sessionsDays',
kind: 'number',
defaultValue: defaultConfig.immersionTracking.retention.sessionsDays,
description: 'Session retention window in days. Use 0 to keep all.',
},
{
path: 'immersionTracking.retention.dailyRollupsDays',
kind: 'number',
defaultValue: defaultConfig.immersionTracking.retention.dailyRollupsDays,
description: 'Daily rollup retention window in days.',
description: 'Daily rollup retention window in days. Use 0 to keep all.',
},
{
path: 'immersionTracking.retention.monthlyRollupsDays',
kind: 'number',
defaultValue: defaultConfig.immersionTracking.retention.monthlyRollupsDays,
description: 'Monthly rollup retention window in days.',
description: 'Monthly rollup retention window in days. Use 0 to keep all.',
},
{
path: 'immersionTracking.retention.vacuumIntervalDays',
kind: 'number',
defaultValue: defaultConfig.immersionTracking.retention.vacuumIntervalDays,
description: 'Minimum days between VACUUM runs.',
description: 'Minimum days between VACUUM runs. Use 0 to disable.',
},
{
path: 'immersionTracking.lifetimeSummaries.global',
kind: 'boolean',
defaultValue: defaultConfig.immersionTracking.lifetimeSummaries?.global,
description: 'Maintain global lifetime stats rows.',
},
{
path: 'immersionTracking.lifetimeSummaries.anime',
kind: 'boolean',
defaultValue: defaultConfig.immersionTracking.lifetimeSummaries?.anime,
description: 'Maintain per-anime lifetime stats rows.',
},
{
path: 'immersionTracking.lifetimeSummaries.media',
kind: 'boolean',
defaultValue: defaultConfig.immersionTracking.lifetimeSummaries?.media,
description: 'Maintain per-media lifetime stats rows.',
},
];
}

View File

@@ -1,9 +1,68 @@
import { ResolveContext } from './context';
import { ImmersionTrackingRetentionMode, ImmersionTrackingRetentionPreset } from '../../types';
import { asBoolean, asNumber, asString, isObject } from './shared';
// Default retention behaviour: use the "balanced" preset unless configured.
const DEFAULT_RETENTION_MODE: ImmersionTrackingRetentionMode = 'preset';
const DEFAULT_RETENTION_PRESET: ImmersionTrackingRetentionPreset = 'balanced';
// All-zero retention — per the option descriptions, 0 means "keep all" /
// "never vacuum". Also defines the shape used by the preset table below.
const BASE_RETENTION = {
eventsDays: 0,
telemetryDays: 0,
sessionsDays: 0,
dailyRollupsDays: 0,
monthlyRollupsDays: 0,
vacuumIntervalDays: 0,
};
// Per-preset retention windows (days). "balanced" keeps everything;
// "minimal" prunes aggressively; "deep-history" keeps long windows while
// still pruning raw events/telemetry.
// Note: "balanced" gets its own copy of BASE_RETENTION so a mutation of one
// object can never silently change the other.
const RETENTION_PRESETS: Record<ImmersionTrackingRetentionPreset, typeof BASE_RETENTION> = {
minimal: {
eventsDays: 3,
telemetryDays: 14,
sessionsDays: 14,
dailyRollupsDays: 30,
monthlyRollupsDays: 365,
vacuumIntervalDays: 7,
},
balanced: { ...BASE_RETENTION },
'deep-history': {
eventsDays: 14,
telemetryDays: 60,
sessionsDays: 60,
dailyRollupsDays: 730,
monthlyRollupsDays: 5 * 365,
vacuumIntervalDays: 7,
},
};
// Lifetime summary rows are maintained for every scope by default.
const DEFAULT_LIFETIME_SUMMARIES = {
global: true,
anime: true,
media: true,
};
// Type guard: narrows an arbitrary config value to a valid retention mode.
function asRetentionMode(value: unknown): value is ImmersionTrackingRetentionMode {
switch (value) {
case 'preset':
case 'advanced':
return true;
default:
return false;
}
}
// Type guard: narrows an arbitrary config value to a known retention preset.
function asRetentionPreset(value: unknown): value is ImmersionTrackingRetentionPreset {
if (typeof value !== 'string') {
return false;
}
const knownPresets: readonly string[] = ['minimal', 'balanced', 'deep-history'];
return knownPresets.includes(value);
}
export function applyImmersionTrackingConfig(context: ResolveContext): void {
const { src, resolved, warn } = context;
if (!isObject(src.immersionTracking)) {
resolved.immersionTracking.retentionMode = DEFAULT_RETENTION_MODE;
resolved.immersionTracking.retentionPreset = DEFAULT_RETENTION_PRESET;
resolved.immersionTracking.retention = {
...BASE_RETENTION,
};
resolved.immersionTracking.lifetimeSummaries = {
...DEFAULT_LIFETIME_SUMMARIES,
};
return;
}
if (isObject(src.immersionTracking)) {
const enabled = asBoolean(src.immersionTracking.enabled);
if (enabled !== undefined) {
@@ -93,81 +152,186 @@ export function applyImmersionTrackingConfig(context: ResolveContext): void {
);
}
const retentionMode = asString(src.immersionTracking.retentionMode);
if (asRetentionMode(retentionMode)) {
resolved.immersionTracking.retentionMode = retentionMode;
} else if (src.immersionTracking.retentionMode !== undefined) {
warn(
'immersionTracking.retentionMode',
src.immersionTracking.retentionMode,
DEFAULT_RETENTION_MODE,
'Expected "preset" or "advanced".',
);
resolved.immersionTracking.retentionMode = DEFAULT_RETENTION_MODE;
} else {
resolved.immersionTracking.retentionMode = DEFAULT_RETENTION_MODE;
}
const retentionPreset = asString(src.immersionTracking.retentionPreset);
if (asRetentionPreset(retentionPreset)) {
resolved.immersionTracking.retentionPreset = retentionPreset;
} else if (src.immersionTracking.retentionPreset !== undefined) {
warn(
'immersionTracking.retentionPreset',
src.immersionTracking.retentionPreset,
DEFAULT_RETENTION_PRESET,
'Expected "minimal", "balanced", or "deep-history".',
);
resolved.immersionTracking.retentionPreset = DEFAULT_RETENTION_PRESET;
} else {
resolved.immersionTracking.retentionPreset =
resolved.immersionTracking.retentionPreset ?? DEFAULT_RETENTION_PRESET;
}
const resolvedPreset =
resolved.immersionTracking.retentionPreset === 'minimal' ||
resolved.immersionTracking.retentionPreset === 'balanced' ||
resolved.immersionTracking.retentionPreset === 'deep-history'
? resolved.immersionTracking.retentionPreset
: DEFAULT_RETENTION_PRESET;
const baseRetention =
resolved.immersionTracking.retentionMode === 'preset'
? RETENTION_PRESETS[resolvedPreset]
: BASE_RETENTION;
const retention = {
eventsDays: baseRetention.eventsDays,
telemetryDays: baseRetention.telemetryDays,
sessionsDays: baseRetention.sessionsDays,
dailyRollupsDays: baseRetention.dailyRollupsDays,
monthlyRollupsDays: baseRetention.monthlyRollupsDays,
vacuumIntervalDays: baseRetention.vacuumIntervalDays,
};
if (isObject(src.immersionTracking.retention)) {
const eventsDays = asNumber(src.immersionTracking.retention.eventsDays);
if (eventsDays !== undefined && eventsDays >= 1 && eventsDays <= 3650) {
resolved.immersionTracking.retention.eventsDays = Math.floor(eventsDays);
if (eventsDays !== undefined && eventsDays >= 0 && eventsDays <= 3650) {
retention.eventsDays = Math.floor(eventsDays);
} else if (src.immersionTracking.retention.eventsDays !== undefined) {
warn(
'immersionTracking.retention.eventsDays',
src.immersionTracking.retention.eventsDays,
resolved.immersionTracking.retention.eventsDays,
'Expected integer between 1 and 3650.',
retention.eventsDays,
'Expected integer between 0 and 3650.',
);
}
const telemetryDays = asNumber(src.immersionTracking.retention.telemetryDays);
if (telemetryDays !== undefined && telemetryDays >= 1 && telemetryDays <= 3650) {
resolved.immersionTracking.retention.telemetryDays = Math.floor(telemetryDays);
if (telemetryDays !== undefined && telemetryDays >= 0 && telemetryDays <= 3650) {
retention.telemetryDays = Math.floor(telemetryDays);
} else if (src.immersionTracking.retention.telemetryDays !== undefined) {
warn(
'immersionTracking.retention.telemetryDays',
src.immersionTracking.retention.telemetryDays,
resolved.immersionTracking.retention.telemetryDays,
'Expected integer between 1 and 3650.',
retention.telemetryDays,
'Expected integer between 0 and 3650.',
);
}
const sessionsDays = asNumber(src.immersionTracking.retention.sessionsDays);
if (sessionsDays !== undefined && sessionsDays >= 0 && sessionsDays <= 3650) {
retention.sessionsDays = Math.floor(sessionsDays);
} else if (src.immersionTracking.retention.sessionsDays !== undefined) {
warn(
'immersionTracking.retention.sessionsDays',
src.immersionTracking.retention.sessionsDays,
retention.sessionsDays,
'Expected integer between 0 and 3650.',
);
}
const dailyRollupsDays = asNumber(src.immersionTracking.retention.dailyRollupsDays);
if (dailyRollupsDays !== undefined && dailyRollupsDays >= 1 && dailyRollupsDays <= 36500) {
resolved.immersionTracking.retention.dailyRollupsDays = Math.floor(dailyRollupsDays);
if (
dailyRollupsDays !== undefined &&
dailyRollupsDays >= 0 &&
dailyRollupsDays <= 36500
) {
retention.dailyRollupsDays = Math.floor(dailyRollupsDays);
} else if (src.immersionTracking.retention.dailyRollupsDays !== undefined) {
warn(
'immersionTracking.retention.dailyRollupsDays',
src.immersionTracking.retention.dailyRollupsDays,
resolved.immersionTracking.retention.dailyRollupsDays,
'Expected integer between 1 and 36500.',
retention.dailyRollupsDays,
'Expected integer between 0 and 36500.',
);
}
const monthlyRollupsDays = asNumber(src.immersionTracking.retention.monthlyRollupsDays);
if (
monthlyRollupsDays !== undefined &&
monthlyRollupsDays >= 1 &&
monthlyRollupsDays >= 0 &&
monthlyRollupsDays <= 36500
) {
resolved.immersionTracking.retention.monthlyRollupsDays = Math.floor(monthlyRollupsDays);
retention.monthlyRollupsDays = Math.floor(monthlyRollupsDays);
} else if (src.immersionTracking.retention.monthlyRollupsDays !== undefined) {
warn(
'immersionTracking.retention.monthlyRollupsDays',
src.immersionTracking.retention.monthlyRollupsDays,
resolved.immersionTracking.retention.monthlyRollupsDays,
'Expected integer between 1 and 36500.',
retention.monthlyRollupsDays,
'Expected integer between 0 and 36500.',
);
}
const vacuumIntervalDays = asNumber(src.immersionTracking.retention.vacuumIntervalDays);
if (
vacuumIntervalDays !== undefined &&
vacuumIntervalDays >= 1 &&
vacuumIntervalDays <= 3650
) {
resolved.immersionTracking.retention.vacuumIntervalDays = Math.floor(vacuumIntervalDays);
if (vacuumIntervalDays !== undefined && vacuumIntervalDays >= 0 && vacuumIntervalDays <= 3650) {
retention.vacuumIntervalDays = Math.floor(vacuumIntervalDays);
} else if (src.immersionTracking.retention.vacuumIntervalDays !== undefined) {
warn(
'immersionTracking.retention.vacuumIntervalDays',
src.immersionTracking.retention.vacuumIntervalDays,
resolved.immersionTracking.retention.vacuumIntervalDays,
'Expected integer between 1 and 3650.',
retention.vacuumIntervalDays,
'Expected integer between 0 and 3650.',
);
}
} else if (src.immersionTracking.retention !== undefined) {
warn(
'immersionTracking.retention',
src.immersionTracking.retention,
resolved.immersionTracking.retention,
baseRetention,
'Expected object.',
);
}
resolved.immersionTracking.retention = {
eventsDays: retention.eventsDays,
telemetryDays: retention.telemetryDays,
sessionsDays: retention.sessionsDays,
dailyRollupsDays: retention.dailyRollupsDays,
monthlyRollupsDays: retention.monthlyRollupsDays,
vacuumIntervalDays: retention.vacuumIntervalDays,
};
const lifetimeSummaries = {
global: DEFAULT_LIFETIME_SUMMARIES.global,
anime: DEFAULT_LIFETIME_SUMMARIES.anime,
media: DEFAULT_LIFETIME_SUMMARIES.media,
};
if (isObject(src.immersionTracking.lifetimeSummaries)) {
const global = asBoolean(src.immersionTracking.lifetimeSummaries.global);
if (global !== undefined) {
lifetimeSummaries.global = global;
}
const anime = asBoolean(src.immersionTracking.lifetimeSummaries.anime);
if (anime !== undefined) {
lifetimeSummaries.anime = anime;
}
const media = asBoolean(src.immersionTracking.lifetimeSummaries.media);
if (media !== undefined) {
lifetimeSummaries.media = media;
}
} else if (src.immersionTracking.lifetimeSummaries !== undefined) {
warn(
'immersionTracking.lifetimeSummaries',
src.immersionTracking.lifetimeSummaries,
DEFAULT_LIFETIME_SUMMARIES,
'Expected object.',
);
}
resolved.immersionTracking.lifetimeSummaries = lifetimeSummaries;
}
}

View File

@@ -217,6 +217,11 @@ function createMockTracker(
activeSessions: 1,
episodesToday: 2,
activeAnimeCount: 3,
totalEpisodesWatched: 0,
totalAnimeCompleted: 0,
totalActiveMin: 120,
totalCards: 0,
activeDays: 7,
}),
getSessionTimeline: async () => [],
getSessionEvents: async () => [],
@@ -289,6 +294,10 @@ describe('stats server API routes', () => {
assert.equal(body.hints.activeSessions, 1);
assert.equal(body.hints.episodesToday, 2);
assert.equal(body.hints.activeAnimeCount, 3);
assert.equal(body.hints.totalEpisodesWatched, 0);
assert.equal(body.hints.totalAnimeCompleted, 0);
assert.equal(body.hints.totalActiveMin, 120);
assert.equal(body.hints.activeDays, 7);
});
it('GET /api/stats/sessions returns session list', async () => {

View File

@@ -234,7 +234,7 @@ test('fetchIfMissing falls back to internal parser when guessit throws', async (
const stored = getCoverArt(db, videoId);
assert.equal(fetched, true);
assert.equal(requestCount, 1);
assert.equal(requestCount, 2);
assert.equal(stored?.anilistId, 21);
} finally {
globalThis.fetch = originalFetch;

View File

@@ -200,10 +200,10 @@ function pickBestSearchResult(
function buildSearchCandidates(parsed: CoverArtCandidate): string[] {
const candidateTitles = [
parsed.title,
...(parsed.source === 'guessit' && parsed.season !== null && parsed.season > 1
? [`${parsed.title} Season ${parsed.season}`]
: []),
parsed.title,
];
return candidateTitles
.map((title) => title.trim())
@@ -257,8 +257,27 @@ export function createCoverArtFetcher(
logger: Logger,
options: CoverArtFetcherOptions = {},
): CoverArtFetcher {
const resolveMediaInfo = async (canonicalTitle: string): Promise<CoverArtCandidate | null> => {
const parsed = await guessAnilistMediaInfo(null, canonicalTitle, {
const resolveCanonicalTitle = (db: DatabaseSync, videoId: number, fallbackTitle: string): string => {
const row = db
.prepare(
`
SELECT canonical_title AS canonicalTitle
FROM imm_videos
WHERE video_id = ?
LIMIT 1
`,
)
.get(videoId) as { canonicalTitle: string | null } | undefined;
return row?.canonicalTitle?.trim() || fallbackTitle;
};
const resolveMediaInfo = async (
db: DatabaseSync,
videoId: number,
canonicalTitle: string,
): Promise<CoverArtCandidate | null> => {
const effectiveTitle = resolveCanonicalTitle(db, videoId, canonicalTitle);
const parsed = await guessAnilistMediaInfo(null, effectiveTitle, {
runGuessit: options.runGuessit ?? runGuessit,
});
if (!parsed) {
@@ -303,7 +322,8 @@ export function createCoverArtFetcher(
return false;
}
const cleaned = stripFilenameTags(canonicalTitle);
const effectiveTitle = resolveCanonicalTitle(db, videoId, canonicalTitle);
const cleaned = stripFilenameTags(effectiveTitle);
if (!cleaned) {
logger.warn('cover-art: empty title after stripping tags for videoId=%d', videoId);
upsertCoverArt(db, videoId, {
@@ -317,7 +337,7 @@ export function createCoverArtFetcher(
return false;
}
const parsedInfo = await resolveMediaInfo(canonicalTitle);
const parsedInfo = await resolveMediaInfo(db, videoId, canonicalTitle);
const searchBase = parsedInfo?.title ?? cleaned;
const searchCandidates = parsedInfo ? buildSearchCandidates(parsedInfo) : [cleaned];

View File

@@ -184,6 +184,595 @@ test('destroy finalizes active session and persists final telemetry', async () =
}
});
test('finalize updates lifetime summary rows from final session metrics', async () => {
  // End-to-end check: destroying the tracker finalizes the active session and
  // folds its final metrics into the global, per-media, and per-anime lifetime
  // summary tables, recording the session in the applied-sessions ledger once.
  const dbPath = makeDbPath();
  let tracker: ImmersionTrackerService | null = null;
  try {
    const Ctor = await loadTrackerCtor();
    tracker = new Ctor({ dbPath });
    tracker.handleMediaChange('/tmp/Little Witch Academia S02E05.mkv', 'Episode 5');
    await waitForPendingAnimeMetadata(tracker);
    // Reach into tracker internals to capture the live session/video ids.
    const privateApi = tracker as unknown as {
      sessionState: { sessionId: number; videoId: number } | null;
    };
    const sessionId = privateApi.sessionState?.sessionId;
    const videoId = privateApi.sessionState?.videoId;
    assert.ok(sessionId);
    assert.ok(videoId);
    // Accumulate session metrics (2 cards, 1 subtitle line, 1 lookup hit)
    // before finalization.
    tracker.recordCardsMined(2);
    tracker.recordSubtitleLine('today is bright', 0, 1.2);
    tracker.recordLookup(true);
    // destroy() finalizes the active session, which applies the lifetime delta.
    tracker.destroy();
    // Inspect the database directly after shutdown.
    const db = new Database(dbPath);
    const globalRow = db
      .prepare('SELECT total_sessions, total_cards, total_active_ms FROM imm_lifetime_global')
      .get() as {
      total_sessions: number;
      total_cards: number;
      total_active_ms: number;
    } | null;
    const mediaRow = db
      .prepare(
        'SELECT total_sessions, total_cards, total_active_ms, total_words_seen, total_lines_seen FROM imm_lifetime_media WHERE video_id = ?',
      )
      .get(videoId) as {
      total_sessions: number;
      total_cards: number;
      total_active_ms: number;
      total_words_seen: number;
      total_lines_seen: number;
    } | null;
    const animeIdRow = db
      .prepare('SELECT anime_id FROM imm_videos WHERE video_id = ?')
      .get(videoId) as { anime_id: number | null } | null;
    const animeRow = animeIdRow?.anime_id
      ? (db
          .prepare('SELECT total_sessions, total_cards FROM imm_lifetime_anime WHERE anime_id = ?')
          .get(animeIdRow.anime_id) as {
          total_sessions: number;
          total_cards: number;
        } | null)
      : null;
    // The applied-sessions ledger is what guards against double-counting.
    const appliedRow = db
      .prepare('SELECT COUNT(*) AS total FROM imm_lifetime_applied_sessions WHERE session_id = ?')
      .get(sessionId) as {
      total: number;
    } | null;
    db.close();
    assert.ok(globalRow);
    assert.equal(globalRow?.total_sessions, 1);
    assert.equal(globalRow?.total_cards, 2);
    assert.ok(Number(globalRow?.total_active_ms ?? 0) >= 0);
    assert.ok(mediaRow);
    assert.equal(mediaRow?.total_sessions, 1);
    assert.equal(mediaRow?.total_cards, 2);
    assert.equal(mediaRow?.total_lines_seen, 1);
    assert.ok(animeRow);
    assert.equal(animeRow?.total_sessions, 1);
    assert.equal(animeRow?.total_cards, 2);
    assert.equal(appliedRow?.total, 1);
  } finally {
    tracker?.destroy();
    cleanupDbPath(dbPath);
  }
});
test('lifetime updates are not double-counted if finalize runs multiple times', async () => {
  // Finalizing the same session twice must apply the lifetime-summary delta
  // exactly once: the applied-sessions ledger makes the second pass a no-op.
  const dbPath = makeDbPath();
  let tracker: ImmersionTrackerService | null = null;
  try {
    const Ctor = await loadTrackerCtor();
    tracker = new Ctor({ dbPath });
    tracker.handleMediaChange('/tmp/Little Witch Academia S02E06.mkv', 'Episode 6');
    await waitForPendingAnimeMetadata(tracker);
    const internals = tracker as unknown as {
      finalizeActiveSession: () => void;
      sessionState: { sessionId: number; videoId: number } | null;
    };
    const savedState = internals.sessionState;
    const trackedSessionId = savedState?.sessionId;
    assert.ok(trackedSessionId);
    tracker.recordCardsMined(3);
    // First finalize applies the delta; restoring the session state and
    // finalizing again simulates an accidental repeated finalization.
    internals.finalizeActiveSession();
    internals.sessionState = savedState;
    internals.finalizeActiveSession();
    const verifyDb = new Database(dbPath);
    const lifetimeGlobal = verifyDb
      .prepare('SELECT total_sessions, total_cards FROM imm_lifetime_global')
      .get() as {
      total_sessions: number;
      total_cards: number;
    } | null;
    const appliedCount = verifyDb
      .prepare('SELECT COUNT(*) AS total FROM imm_lifetime_applied_sessions WHERE session_id = ?')
      .get(trackedSessionId) as {
      total: number;
    } | null;
    verifyDb.close();
    // Exactly one session and its 3 cards, despite two finalize calls.
    assert.ok(lifetimeGlobal);
    assert.equal(lifetimeGlobal?.total_sessions, 1);
    assert.equal(lifetimeGlobal?.total_cards, 3);
    assert.equal(appliedCount?.total, 1);
  } finally {
    tracker?.destroy();
    cleanupDbPath(dbPath);
  }
});
test('lifetime counters use distinct-day and distinct-video semantics', async () => {
  // Rewatching the same video must not inflate episodes_started/completed,
  // and three sessions on the same calendar day must yield active_days = 1.
  // (Fix: the original reassigned `privateApi` after each tracker restart but
  // never read it again — the dead `let` reassignments are removed.)
  const dbPath = makeDbPath();
  let tracker: ImmersionTrackerService | null = null;
  try {
    const Ctor = await loadTrackerCtor();
    tracker = new Ctor({ dbPath });
    // Session 1: first episode.
    tracker.handleMediaChange('/tmp/Little Witch Academia S02E05.mkv', 'Episode 5');
    await waitForPendingAnimeMetadata(tracker);
    const firstApi = tracker as unknown as {
      db: DatabaseSync;
      sessionState: { sessionId: number; videoId: number } | null;
    };
    const firstVideoId = firstApi.sessionState?.videoId;
    assert.ok(firstVideoId);
    const animeId = (
      firstApi.db
        .prepare('SELECT anime_id FROM imm_videos WHERE video_id = ?')
        .get(firstVideoId) as {
        anime_id: number | null;
      } | null
    )?.anime_id;
    assert.ok(animeId);
    // Cap the series at 2 episodes so completing both marks the anime done.
    firstApi.db
      .prepare('UPDATE imm_anime SET episodes_total = 2 WHERE anime_id = ?')
      .run(animeId);
    await tracker.setVideoWatched(firstVideoId, true);
    tracker.destroy();
    // Session 2: rewatch the same episode — must map to the same video row.
    tracker = new Ctor({ dbPath });
    tracker.handleMediaChange('/tmp/Little Witch Academia S02E05.mkv', 'Episode 5');
    await waitForPendingAnimeMetadata(tracker);
    const repeatedSessionApi = tracker as unknown as {
      sessionState: { sessionId: number; videoId: number } | null;
    };
    const repeatedVideoId = repeatedSessionApi.sessionState?.videoId;
    assert.equal(repeatedVideoId, firstVideoId);
    await tracker.setVideoWatched(repeatedVideoId, true);
    tracker.destroy();
    // Session 3: a second, distinct episode of the same anime.
    tracker = new Ctor({ dbPath });
    tracker.handleMediaChange('/tmp/Little Witch Academia S02E06.mkv', 'Episode 6');
    await waitForPendingAnimeMetadata(tracker);
    const secondSessionApi = tracker as unknown as {
      sessionState: { sessionId: number; videoId: number } | null;
    };
    const secondVideoId = secondSessionApi.sessionState?.videoId;
    assert.ok(secondVideoId);
    assert.ok(secondVideoId !== firstVideoId);
    await tracker.setVideoWatched(secondVideoId, true);
    tracker.destroy();
    // Verify the lifetime counters against a fresh connection.
    const db = new Database(dbPath);
    const globalRow = db
      .prepare(
        'SELECT total_sessions, active_days, episodes_started, episodes_completed, anime_completed FROM imm_lifetime_global',
      )
      .get() as {
      total_sessions: number;
      active_days: number;
      episodes_started: number;
      episodes_completed: number;
      anime_completed: number;
    } | null;
    const firstMediaRow = db
      .prepare('SELECT completed FROM imm_lifetime_media WHERE video_id = ?')
      .get(firstVideoId) as { completed: number } | null;
    const secondMediaRow = db
      .prepare('SELECT completed FROM imm_lifetime_media WHERE video_id = ?')
      .get(secondVideoId) as { completed: number } | null;
    const animeRow = db
      .prepare(
        'SELECT episodes_started, episodes_completed FROM imm_lifetime_anime WHERE anime_id = ?',
      )
      .get(animeId) as { episodes_started: number; episodes_completed: number } | null;
    db.close();
    // 3 sessions but only 2 distinct episodes, all on one active day.
    assert.ok(globalRow);
    assert.equal(globalRow?.total_sessions, 3);
    assert.equal(globalRow?.active_days, 1);
    assert.equal(globalRow?.episodes_started, 2);
    assert.equal(globalRow?.episodes_completed, 2);
    assert.equal(globalRow?.anime_completed, 1);
    assert.ok(firstMediaRow);
    assert.equal(firstMediaRow?.completed, 1);
    assert.ok(secondMediaRow);
    assert.equal(secondMediaRow?.completed, 1);
    assert.ok(animeRow);
    assert.equal(animeRow?.episodes_started, 2);
    assert.equal(animeRow?.episodes_completed, 2);
  } finally {
    tracker?.destroy();
    cleanupDbPath(dbPath);
  }
});
test('rebuildLifetimeSummaries backfills retained ended sessions and resets stale lifetime rows', async () => {
  // rebuildLifetimeSummaries() must discard corrupted/stale lifetime rows and
  // recompute the summaries from scratch out of the retained ended sessions.
  const dbPath = makeDbPath();
  let tracker: ImmersionTrackerService | null = null;
  try {
    const Ctor = await loadTrackerCtor();
    tracker = new Ctor({ dbPath });
    // Session 1: first episode, 2 cards mined, marked watched.
    tracker.handleMediaChange('/tmp/Little Witch Academia S02E05.mkv', 'Episode 5');
    await waitForPendingAnimeMetadata(tracker);
    const firstApi = tracker as unknown as {
      db: DatabaseSync;
      sessionState: { videoId: number } | null;
    };
    const firstVideoId = firstApi.sessionState?.videoId;
    if (firstVideoId == null) {
      throw new Error('Expected first session video id');
    }
    const animeId = (
      firstApi.db
        .prepare('SELECT anime_id FROM imm_videos WHERE video_id = ?')
        .get(firstVideoId) as {
        anime_id: number | null;
      } | null
    )?.anime_id;
    assert.ok(animeId);
    // Cap the series at two episodes so completing both completes the anime.
    firstApi.db.prepare('UPDATE imm_anime SET episodes_total = 2 WHERE anime_id = ?').run(animeId);
    tracker.recordCardsMined(2);
    await tracker.setVideoWatched(firstVideoId, true);
    tracker.destroy();
    // Session 2: second episode, 1 card mined, marked watched.
    tracker = new Ctor({ dbPath });
    tracker.handleMediaChange('/tmp/Little Witch Academia S02E06.mkv', 'Episode 6');
    await waitForPendingAnimeMetadata(tracker);
    const secondApi = tracker as unknown as {
      sessionState: { videoId: number } | null;
    };
    const secondVideoId = secondApi.sessionState?.videoId;
    if (secondVideoId == null) {
      throw new Error('Expected second session video id');
    }
    tracker.recordCardsMined(1);
    await tracker.setVideoWatched(secondVideoId, true);
    tracker.destroy();
    // Restart, then deliberately corrupt the lifetime tables: bogus global
    // counters plus emptied media/anime/applied-session rows.
    tracker = new Ctor({ dbPath });
    const rebuildApi = tracker as unknown as { db: DatabaseSync };
    rebuildApi.db
      .prepare(
        `
          UPDATE imm_lifetime_global
          SET
            total_sessions = 99,
            total_cards = 77,
            episodes_started = 88,
            episodes_completed = 66
          WHERE global_id = 1
        `,
      )
      .run();
    rebuildApi.db.exec(`
      DELETE FROM imm_lifetime_media;
      DELETE FROM imm_lifetime_anime;
      DELETE FROM imm_lifetime_applied_sessions;
    `);
    // The rebuild should re-apply both retained sessions and stamp the run.
    const rebuild = await tracker.rebuildLifetimeSummaries();
    const globalRow = rebuildApi.db
      .prepare(
        'SELECT total_sessions, total_cards, episodes_started, episodes_completed, anime_completed, last_rebuilt_ms FROM imm_lifetime_global WHERE global_id = 1',
      )
      .get() as {
      total_sessions: number;
      total_cards: number;
      episodes_started: number;
      episodes_completed: number;
      anime_completed: number;
      last_rebuilt_ms: number | null;
    } | null;
    const appliedSessions = rebuildApi.db
      .prepare('SELECT COUNT(*) AS total FROM imm_lifetime_applied_sessions')
      .get() as { total: number } | null;
    assert.equal(rebuild.appliedSessions, 2);
    assert.ok(rebuild.rebuiltAtMs > 0);
    // Bogus counters are replaced with the recomputed truth (2 sessions,
    // 2 + 1 cards, both episodes started and completed, anime completed).
    assert.ok(globalRow);
    assert.equal(globalRow?.total_sessions, 2);
    assert.equal(globalRow?.total_cards, 3);
    assert.equal(globalRow?.episodes_started, 2);
    assert.equal(globalRow?.episodes_completed, 2);
    assert.equal(globalRow?.anime_completed, 1);
    assert.equal(globalRow?.last_rebuilt_ms, rebuild.rebuiltAtMs);
    assert.equal(appliedSessions?.total, 2);
  } finally {
    tracker?.destroy();
    cleanupDbPath(dbPath);
  }
});
test('fresh tracker DB creates lifetime summary tables', async () => {
  // Constructing a tracker against a brand-new DB file must create every
  // lifetime summary table as part of schema setup.
  const dbPath = makeDbPath();
  let tracker: ImmersionTrackerService | null = null;
  try {
    const Ctor = await loadTrackerCtor();
    tracker = new Ctor({ dbPath });
    // Inspect the schema through an independent connection.
    const inspection = new Database(dbPath);
    const rows = inspection
      .prepare("SELECT name FROM sqlite_master WHERE type='table' ORDER BY name")
      .all() as Array<{ name: string }>;
    inspection.close();
    const present = new Set(rows.map((entry) => entry.name));
    [
      'imm_lifetime_global',
      'imm_lifetime_anime',
      'imm_lifetime_media',
      'imm_lifetime_applied_sessions',
    ].forEach((tableName) => {
      assert.ok(present.has(tableName), `Expected ${tableName} to exist`);
    });
  } finally {
    tracker?.destroy();
    cleanupDbPath(dbPath);
  }
});
test('startup backfills lifetime summaries when retained sessions exist but summary tables are empty', async () => {
  // Simulates a DB upgraded from a build without lifetime summaries: ended
  // sessions are retained but summary tables are zeroed/empty. Constructing a
  // tracker should detect this state and backfill the summaries at startup.
  const dbPath = makeDbPath();
  let tracker: ImmersionTrackerService | null = null;
  try {
    const Ctor = await loadTrackerCtor();
    tracker = new Ctor({ dbPath });
    // Produce one ended session with 2 cards mined.
    tracker.handleMediaChange('/tmp/KonoSuba S02E05.mkv', 'Episode 5');
    await waitForPendingAnimeMetadata(tracker);
    tracker.recordCardsMined(2);
    tracker.destroy();
    // Wipe the lifetime summaries while keeping the ended session rows.
    const db = new Database(dbPath);
    db.exec(`
      DELETE FROM imm_lifetime_media;
      DELETE FROM imm_lifetime_anime;
      DELETE FROM imm_lifetime_applied_sessions;
      UPDATE imm_lifetime_global
      SET
        total_sessions = 0,
        total_active_ms = 0,
        total_cards = 0,
        active_days = 0,
        episodes_started = 0,
        episodes_completed = 0,
        anime_completed = 0
      WHERE global_id = 1;
    `);
    db.close();
    // Restart: startup should rebuild the summaries from the retained session.
    tracker = new Ctor({ dbPath });
    const trackerApi = tracker as unknown as { db: DatabaseSync };
    const globalRow = trackerApi.db
      .prepare(
        'SELECT total_sessions, total_cards, active_days FROM imm_lifetime_global WHERE global_id = 1',
      )
      .get() as {
      total_sessions: number;
      total_cards: number;
      active_days: number;
    } | null;
    const mediaRows = trackerApi.db
      .prepare('SELECT COUNT(*) AS total FROM imm_lifetime_media')
      .get() as { total: number } | null;
    const appliedRows = trackerApi.db
      .prepare('SELECT COUNT(*) AS total FROM imm_lifetime_applied_sessions')
      .get() as { total: number } | null;
    assert.ok(globalRow);
    assert.equal(globalRow?.total_sessions, 1);
    assert.equal(globalRow?.total_cards, 2);
    assert.equal(globalRow?.active_days, 1);
    assert.equal(mediaRows?.total, 1);
    assert.equal(appliedRows?.total, 1);
  } finally {
    tracker?.destroy();
    cleanupDbPath(dbPath);
  }
});
test('startup finalizes stale active sessions and applies lifetime summaries', async () => {
  // Simulates a crash mid-session: an imm_sessions row left with status = 1
  // (active) plus a final telemetry sample. On restart the tracker must
  // finalize it (status = 2, metrics taken from the last telemetry sample) and
  // fold the finalized session into the lifetime summaries.
  const dbPath = makeDbPath();
  let tracker: ImmersionTrackerService | null = null;
  try {
    const Ctor = await loadTrackerCtor();
    tracker = new Ctor({ dbPath });
    const trackerApi = tracker as unknown as { db: DatabaseSync };
    const db = trackerApi.db;
    const startedAtMs = Date.now() - 10_000;
    const sampleMs = startedAtMs + 5_000;
    // Hand-craft an anime, its already-watched video, a still-active session,
    // and a final telemetry sample for that session.
    db.exec(`
      INSERT INTO imm_anime (
        anime_id,
        canonical_title,
        normalized_title_key,
        episodes_total,
        CREATED_DATE,
        LAST_UPDATE_DATE
      ) VALUES (
        1,
        'KonoSuba',
        'konosuba',
        10,
        ${startedAtMs},
        ${startedAtMs}
      );
      INSERT INTO imm_videos (
        video_id,
        video_key,
        canonical_title,
        anime_id,
        watched,
        source_type,
        duration_ms,
        CREATED_DATE,
        LAST_UPDATE_DATE
      ) VALUES (
        1,
        'local:/tmp/konosuba-s02e05.mkv',
        'KonoSuba S02E05',
        1,
        1,
        1,
        0,
        ${startedAtMs},
        ${startedAtMs}
      );
      INSERT INTO imm_sessions (
        session_id,
        session_uuid,
        video_id,
        started_at_ms,
        status,
        CREATED_DATE,
        LAST_UPDATE_DATE
      ) VALUES (
        1,
        '11111111-1111-1111-1111-111111111111',
        1,
        ${startedAtMs},
        1,
        ${startedAtMs},
        ${sampleMs}
      );
      INSERT INTO imm_session_telemetry (
        session_id,
        sample_ms,
        total_watched_ms,
        active_watched_ms,
        lines_seen,
        words_seen,
        tokens_seen,
        cards_mined,
        lookup_count,
        lookup_hits,
        pause_count,
        pause_ms,
        seek_forward_count,
        seek_backward_count,
        media_buffer_events
      ) VALUES (
        1,
        ${sampleMs},
        5000,
        4000,
        12,
        90,
        120,
        2,
        5,
        3,
        1,
        250,
        1,
        0,
        0
      );
    `);
    // Restart: the constructor should reconcile the stale active session.
    tracker.destroy();
    tracker = new Ctor({ dbPath });
    const restartedApi = tracker as unknown as { db: DatabaseSync };
    const sessionRow = restartedApi.db
      .prepare(
        `
          SELECT ended_at_ms, status, active_watched_ms, words_seen, cards_mined
          FROM imm_sessions
          WHERE session_id = 1
        `,
      )
      .get() as {
      ended_at_ms: number | null;
      status: number;
      active_watched_ms: number;
      words_seen: number;
      cards_mined: number;
    } | null;
    const globalRow = restartedApi.db
      .prepare(
        `
          SELECT total_sessions, total_active_ms, total_cards, active_days, episodes_started,
            episodes_completed
          FROM imm_lifetime_global
          WHERE global_id = 1
        `,
      )
      .get() as {
      total_sessions: number;
      total_active_ms: number;
      total_cards: number;
      active_days: number;
      episodes_started: number;
      episodes_completed: number;
    } | null;
    const mediaRows = restartedApi.db
      .prepare('SELECT COUNT(*) AS total FROM imm_lifetime_media')
      .get() as { total: number } | null;
    const animeRows = restartedApi.db
      .prepare('SELECT COUNT(*) AS total FROM imm_lifetime_anime')
      .get() as { total: number } | null;
    const appliedRows = restartedApi.db
      .prepare('SELECT COUNT(*) AS total FROM imm_lifetime_applied_sessions')
      .get() as { total: number } | null;
    // The session is closed no earlier than the last telemetry sample, and the
    // final metrics (4000 ms active, 90 words, 2 cards) come from that sample.
    assert.ok(sessionRow);
    assert.ok(Number(sessionRow?.ended_at_ms ?? 0) >= sampleMs);
    assert.equal(sessionRow?.status, 2);
    assert.equal(sessionRow?.active_watched_ms, 4000);
    assert.equal(sessionRow?.words_seen, 90);
    assert.equal(sessionRow?.cards_mined, 2);
    assert.ok(globalRow);
    assert.equal(globalRow?.total_sessions, 1);
    assert.equal(globalRow?.total_active_ms, 4000);
    assert.equal(globalRow?.total_cards, 2);
    assert.equal(globalRow?.active_days, 1);
    assert.equal(globalRow?.episodes_started, 1);
    assert.equal(globalRow?.episodes_completed, 1);
    assert.equal(mediaRows?.total, 1);
    assert.equal(animeRows?.total, 1);
    assert.equal(appliedRows?.total, 1);
  } finally {
    tracker?.destroy();
    cleanupDbPath(dbPath);
  }
});
test('persists and retrieves minimum immersion tracking fields', async () => {
const dbPath = makeDbPath();
let tracker: ImmersionTrackerService | null = null;
@@ -420,6 +1009,50 @@ test('recordSubtitleLine persists counted allowed tokenized vocabulary rows and
}
});
test('subtitle-line event payload omits duplicated subtitle text', async () => {
  // The persisted subtitle-line event payload should carry counters only;
  // the raw subtitle text must not be duplicated into the payload JSON.
  const dbPath = makeDbPath();
  let tracker: ImmersionTrackerService | null = null;
  try {
    const Ctor = await loadTrackerCtor();
    tracker = new Ctor({ dbPath });
    tracker.handleMediaChange('/tmp/payload-dup-test.mkv', 'Payload Dup Test');
    tracker.recordSubtitleLine('same line text', 0, 1);
    const internals = tracker as unknown as {
      flushTelemetry: (force?: boolean) => void;
      flushNow: () => void;
      db: DatabaseSync;
    };
    // Force queued telemetry/events through to the database.
    internals.flushTelemetry(true);
    internals.flushNow();
    const eventRow = internals.db
      .prepare(
        `
          SELECT payload_json AS payloadJson
          FROM imm_session_events
          WHERE event_type = ?
          ORDER BY event_id DESC
          LIMIT 1
        `,
      )
      .get(1) as { payloadJson: string | null } | null;
    assert.ok(eventRow?.payloadJson);
    const payload = JSON.parse(eventRow?.payloadJson ?? '{}') as {
      event?: string;
      words?: number;
      text?: string;
    };
    // Counters are present; the subtitle text key is absent entirely.
    assert.equal(payload.event, 'subtitle-line');
    assert.equal(typeof payload.words, 'number');
    assert.equal('text' in payload, false);
  } finally {
    tracker?.destroy();
    cleanupDbPath(dbPath);
  }
});
test('handleMediaChange links parsed anime metadata on the active video row', async () => {
const dbPath = makeDbPath();
let tracker: ImmersionTrackerService | null = null;
@@ -572,6 +1205,7 @@ test('applies configurable queue, flush, and retention policy', async () => {
retention: {
eventsDays: 14,
telemetryDays: 45,
sessionsDays: 60,
dailyRollupsDays: 730,
monthlyRollupsDays: 3650,
vacuumIntervalDays: 14,
@@ -587,6 +1221,7 @@ test('applies configurable queue, flush, and retention policy', async () => {
maintenanceIntervalMs: number;
eventsRetentionMs: number;
telemetryRetentionMs: number;
sessionsRetentionMs: number;
dailyRollupRetentionMs: number;
monthlyRollupRetentionMs: number;
vacuumIntervalMs: number;
@@ -599,6 +1234,7 @@ test('applies configurable queue, flush, and retention policy', async () => {
assert.equal(privateApi.maintenanceIntervalMs, 7_200_000);
assert.equal(privateApi.eventsRetentionMs, 14 * 86_400_000);
assert.equal(privateApi.telemetryRetentionMs, 45 * 86_400_000);
assert.equal(privateApi.sessionsRetentionMs, 60 * 86_400_000);
assert.equal(privateApi.dailyRollupRetentionMs, 730 * 86_400_000);
assert.equal(privateApi.monthlyRollupRetentionMs, 3650 * 86_400_000);
assert.equal(privateApi.vacuumIntervalMs, 14 * 86_400_000);
@@ -608,6 +1244,180 @@ test('applies configurable queue, flush, and retention policy', async () => {
}
});
test('zero retention days disables prune checks while preserving rollups', async () => {
  // A retention setting of 0 days means "keep forever": every retention window
  // should resolve to Infinity, and maintenance must delete nothing — raw
  // events, telemetry, ended sessions, and rollups all survive regardless of
  // age, and vacuuming is never scheduled.
  const dbPath = makeDbPath();
  let tracker: ImmersionTrackerService | null = null;
  try {
    const Ctor = await loadTrackerCtor();
    tracker = new Ctor({
      dbPath,
      policy: {
        retention: {
          eventsDays: 0,
          telemetryDays: 0,
          sessionsDays: 0,
          dailyRollupsDays: 0,
          monthlyRollupsDays: 0,
          vacuumIntervalDays: 0,
        },
      },
    });
    const privateApi = tracker as unknown as {
      runMaintenance: () => void;
      db: DatabaseSync;
      eventsRetentionMs: number;
      telemetryRetentionMs: number;
      sessionsRetentionMs: number;
      dailyRollupRetentionMs: number;
      monthlyRollupRetentionMs: number;
      vacuumIntervalMs: number;
      lastVacuumMs: number;
    };
    // Every zero-day window resolves to Infinity (pruning disabled).
    assert.equal(privateApi.eventsRetentionMs, Number.POSITIVE_INFINITY);
    assert.equal(privateApi.telemetryRetentionMs, Number.POSITIVE_INFINITY);
    assert.equal(privateApi.sessionsRetentionMs, Number.POSITIVE_INFINITY);
    assert.equal(privateApi.dailyRollupRetentionMs, Number.POSITIVE_INFINITY);
    assert.equal(privateApi.monthlyRollupRetentionMs, Number.POSITIVE_INFINITY);
    assert.equal(privateApi.vacuumIntervalMs, Number.POSITIVE_INFINITY);
    assert.equal(privateApi.lastVacuumMs, 0);
    // Seed very old data (roughly 400 and 800 days back) that any enabled
    // retention policy would normally prune.
    const nowMs = Date.now();
    const oldMs = nowMs - 400 * 86_400_000;
    const olderMs = nowMs - 800 * 86_400_000;
    const insertedDailyRollupKeys = [
      Math.floor(olderMs / 86_400_000) - 10,
      Math.floor(oldMs / 86_400_000) - 5,
    ];
    const insertedMonthlyRollupKeys = [
      toMonthKey(olderMs - 400 * 86_400_000),
      toMonthKey(oldMs - 700 * 86_400_000),
    ];
    privateApi.db.exec(`
      INSERT INTO imm_videos (
        video_id,
        video_key,
        canonical_title,
        source_type,
        duration_ms,
        CREATED_DATE,
        LAST_UPDATE_DATE
      ) VALUES (
        1,
        'local:/tmp/video.mkv',
        'Episode',
        1,
        0,
        ${olderMs},
        ${olderMs}
      )
    `);
    privateApi.db.exec(`
      INSERT INTO imm_sessions (
        session_id,
        session_uuid,
        video_id,
        started_at_ms,
        ended_at_ms,
        status,
        CREATED_DATE,
        LAST_UPDATE_DATE
      ) VALUES
        (1, 'session-1', 1, ${olderMs}, ${olderMs + 1_000}, 2, ${olderMs}, ${olderMs}),
        (2, 'session-2', 1, ${oldMs}, ${oldMs + 1_000}, 2, ${oldMs}, ${oldMs})
    `);
    privateApi.db.exec(`
      INSERT INTO imm_session_events (
        session_id,
        ts_ms,
        event_type,
        segment_start_ms,
        segment_end_ms,
        created_date,
        last_update_date
      ) VALUES
        (1, ${olderMs}, 1, 0, 1, ${olderMs}, ${olderMs}),
        (2, ${oldMs}, 1, 2, 3, ${oldMs}, ${oldMs})
    `);
    privateApi.db.exec(`
      INSERT INTO imm_session_telemetry (
        session_id,
        sample_ms,
        total_watched_ms,
        active_watched_ms,
        CREATED_DATE,
        LAST_UPDATE_DATE
      ) VALUES
        (1, ${olderMs}, 1000, 1000, ${olderMs}, ${olderMs}),
        (2, ${oldMs}, 2000, 1500, ${oldMs}, ${oldMs})
    `);
    privateApi.db.exec(`
      INSERT INTO imm_daily_rollups (
        rollup_day,
        video_id,
        total_sessions,
        total_active_min,
        total_lines_seen,
        total_words_seen,
        total_tokens_seen,
        total_cards
      ) VALUES
        (${insertedDailyRollupKeys[0]}, 1, 1, 1, 1, 1, 1, 1),
        (${insertedDailyRollupKeys[1]}, 1, 1, 1, 1, 1, 1, 1)
    `);
    privateApi.db.exec(`
      INSERT INTO imm_monthly_rollups (
        rollup_month,
        video_id,
        total_sessions,
        total_active_min,
        total_lines_seen,
        total_words_seen,
        total_tokens_seen,
        total_cards,
        CREATED_DATE,
        LAST_UPDATE_DATE
      ) VALUES
        (${insertedMonthlyRollupKeys[0]}, 1, 1, 1, 1, 1, 1, 1, ${olderMs}, ${olderMs}),
        (${insertedMonthlyRollupKeys[1]}, 1, 1, 1, 1, 1, 1, 1, ${oldMs}, ${oldMs})
    `);
    // Run maintenance with pruning disabled: everything must survive.
    privateApi.runMaintenance();
    const rawEvents = privateApi.db
      .prepare('SELECT COUNT(*) as total FROM imm_session_events WHERE session_id IN (1,2)')
      .get() as { total: number };
    const rawTelemetry = privateApi.db
      .prepare('SELECT COUNT(*) as total FROM imm_session_telemetry WHERE session_id IN (1,2)')
      .get() as { total: number };
    const endedSessions = privateApi.db
      .prepare('SELECT COUNT(*) as total FROM imm_sessions WHERE session_id IN (1,2)')
      .get() as { total: number };
    const dailyRollups = privateApi.db
      .prepare(
        'SELECT COUNT(*) as total FROM imm_daily_rollups WHERE video_id = 1 AND rollup_day IN (?, ?)',
      )
      .get(insertedDailyRollupKeys[0], insertedDailyRollupKeys[1]) as { total: number };
    const monthlyRollups = privateApi.db
      .prepare(
        'SELECT COUNT(*) as total FROM imm_monthly_rollups WHERE video_id = 1 AND rollup_month IN (?, ?)',
      )
      .get(insertedMonthlyRollupKeys[0], insertedMonthlyRollupKeys[1]) as { total: number };
    assert.equal(rawEvents.total, 2);
    assert.equal(rawTelemetry.total, 2);
    assert.equal(endedSessions.total, 2);
    assert.equal(dailyRollups.total, 2);
    assert.equal(monthlyRollups.total, 2);
  } finally {
    tracker?.destroy();
    cleanupDbPath(dbPath);
  }
});
test('monthly rollups are grouped by calendar month', async () => {
const dbPath = makeDbPath();
let tracker: ImmersionTrackerService | null = null;
@@ -902,3 +1712,112 @@ test('flushSingle reuses cached prepared statements', async () => {
cleanupDbPath(dbPath);
}
});
test('reassignAnimeAnilist deduplicates cover blobs and getCoverArt remains compatible', async () => {
  // Reassigning an anime's AniList entry stores the downloaded cover bytes in
  // imm_cover_art_blobs keyed by content hash; every video's imm_media_art row
  // shares that single blob (no duplication), and getCoverArt still resolves
  // the full image bytes for any of the anime's videos.
  const dbPath = makeDbPath();
  let tracker: ImmersionTrackerService | null = null;
  const originalFetch = globalThis.fetch;
  const sharedCoverBlob = Buffer.from([1, 2, 3, 4, 5, 6, 7, 8]);
  try {
    // Stub fetch so the cover "download" returns deterministic bytes.
    globalThis.fetch = async () =>
      new Response(new Uint8Array(sharedCoverBlob), {
        status: 200,
        headers: { 'Content-Type': 'image/jpeg' },
      });
    const Ctor = await loadTrackerCtor();
    tracker = new Ctor({ dbPath });
    const privateApi = tracker as unknown as { db: DatabaseSync };
    // Seed one anime with two videos that will share the same cover art.
    privateApi.db.exec(`
      INSERT INTO imm_anime (
        anime_id,
        normalized_title_key,
        canonical_title,
        CREATED_DATE,
        LAST_UPDATE_DATE
      ) VALUES (
        1,
        'little witch academia',
        'Little Witch Academia',
        1000,
        1000
      );
      INSERT INTO imm_videos (
        video_id,
        video_key,
        canonical_title,
        source_type,
        duration_ms,
        anime_id,
        CREATED_DATE,
        LAST_UPDATE_DATE
      ) VALUES
        (
          1,
          'local:/tmp/lwa-1.mkv',
          'Little Witch Academia S01E01',
          1,
          0,
          1,
          1000,
          1000
        ),
        (
          2,
          'local:/tmp/lwa-2.mkv',
          'Little Witch Academia S01E02',
          1,
          0,
          1,
          1000,
          1000
        );
    `);
    await tracker.reassignAnimeAnilist(1, {
      anilistId: 33489,
      titleRomaji: 'Little Witch Academia',
      titleEnglish: 'Little Witch Academia',
      episodesTotal: 25,
      coverUrl: 'https://example.com/lwa.jpg',
    });
    const blobRows = privateApi.db
      .prepare('SELECT blob_hash AS blobHash, cover_blob AS coverBlob FROM imm_cover_art_blobs')
      .all() as Array<{ blobHash: string; coverBlob: Buffer }>;
    const mediaRows = privateApi.db
      .prepare(
        `
          SELECT
            video_id AS videoId,
            cover_blob AS coverBlob,
            cover_blob_hash AS coverBlobHash
          FROM imm_media_art
          ORDER BY video_id ASC
        `,
      )
      .all() as Array<{
      videoId: number;
      coverBlob: Buffer | null;
      coverBlobHash: string | null;
    }>;
    // One shared blob row, referenced by both media rows via the same hash.
    assert.equal(blobRows.length, 1);
    assert.deepEqual(new Uint8Array(blobRows[0]!.coverBlob), new Uint8Array(sharedCoverBlob));
    assert.equal(mediaRows.length, 2);
    assert.equal(typeof mediaRows[0]?.coverBlobHash, 'string');
    assert.equal(mediaRows[0]?.coverBlobHash, mediaRows[1]?.coverBlobHash);
    // The public accessor must still return the full image bytes.
    const resolvedCover = await tracker.getCoverArt(2);
    assert.ok(resolvedCover?.coverBlob);
    assert.deepEqual(
      new Uint8Array(resolvedCover?.coverBlob ?? Buffer.alloc(0)),
      new Uint8Array(sharedCoverBlob),
    );
  } finally {
    globalThis.fetch = originalFetch;
    tracker?.destroy();
    cleanupDbPath(dbPath);
  }
});

View File

@@ -3,7 +3,11 @@ import * as fs from 'node:fs';
import { createLogger } from '../../logger';
import type { CoverArtFetcher } from './anilist/cover-art-fetcher';
import { getLocalVideoMetadata, guessAnimeVideoMetadata } from './immersion-tracker/metadata';
import { pruneRetention, runRollupMaintenance } from './immersion-tracker/maintenance';
import {
pruneRawRetention,
pruneRollupRetention,
runRollupMaintenance,
} from './immersion-tracker/maintenance';
import { Database, type DatabaseSync } from './immersion-tracker/sqlite';
import { finalizeSessionRecord, startSessionRecord } from './immersion-tracker/session';
import {
@@ -18,6 +22,12 @@ import {
updateVideoMetadataRecord,
updateVideoTitleRecord,
} from './immersion-tracker/storage';
import {
applySessionLifetimeSummary,
reconcileStaleActiveSessions,
rebuildLifetimeSummaries as rebuildLifetimeSummaryTables,
shouldBackfillLifetimeSummaries,
} from './immersion-tracker/lifetime';
import {
cleanupVocabularyStats,
getAnimeCoverArt,
@@ -56,6 +66,7 @@ import {
getWordDetail,
getWordOccurrences,
getVideoDurationMs,
upsertCoverArt,
markVideoWatched,
deleteSession as deleteSessionQuery,
deleteVideo as deleteVideoQuery,
@@ -82,6 +93,7 @@ import {
DEFAULT_MAX_PAYLOAD_BYTES,
DEFAULT_MONTHLY_ROLLUP_RETENTION_MS,
DEFAULT_QUEUE_CAP,
DEFAULT_SESSIONS_RETENTION_MS,
DEFAULT_TELEMETRY_RETENTION_MS,
DEFAULT_VACUUM_INTERVAL_MS,
EVENT_CARD_MINED,
@@ -103,6 +115,7 @@ import {
type KanjiOccurrenceRow,
type KanjiStatsRow,
type KanjiWordRow,
type LifetimeRebuildSummary,
type LegacyVocabularyPosResolution,
type LegacyVocabularyPosRow,
type AnimeAnilistEntryRow,
@@ -176,6 +189,7 @@ export class ImmersionTrackerService {
private readonly maxPayloadBytes: number;
private readonly eventsRetentionMs: number;
private readonly telemetryRetentionMs: number;
private readonly sessionsRetentionMs: number;
private readonly dailyRollupRetentionMs: number;
private readonly monthlyRollupRetentionMs: number;
private readonly vacuumIntervalMs: number;
@@ -230,44 +244,55 @@ export class ImmersionTrackerService {
);
const retention = policy.retention ?? {};
this.eventsRetentionMs =
resolveBoundedInt(
retention.eventsDays,
Math.floor(DEFAULT_EVENTS_RETENTION_MS / 86_400_000),
1,
3650,
) * 86_400_000;
this.telemetryRetentionMs =
resolveBoundedInt(
retention.telemetryDays,
Math.floor(DEFAULT_TELEMETRY_RETENTION_MS / 86_400_000),
1,
3650,
) * 86_400_000;
this.dailyRollupRetentionMs =
resolveBoundedInt(
retention.dailyRollupsDays,
Math.floor(DEFAULT_DAILY_ROLLUP_RETENTION_MS / 86_400_000),
1,
36500,
) * 86_400_000;
this.monthlyRollupRetentionMs =
resolveBoundedInt(
retention.monthlyRollupsDays,
Math.floor(DEFAULT_MONTHLY_ROLLUP_RETENTION_MS / 86_400_000),
1,
36500,
) * 86_400_000;
this.vacuumIntervalMs =
resolveBoundedInt(
retention.vacuumIntervalDays,
Math.floor(DEFAULT_VACUUM_INTERVAL_MS / 86_400_000),
1,
3650,
) * 86_400_000;
const daysToRetentionMs = (value: number | undefined, fallbackMs: number, maxDays: number): number => {
const fallbackDays = Math.floor(fallbackMs / 86_400_000);
const resolvedDays = resolveBoundedInt(value, fallbackDays, 0, maxDays);
return resolvedDays === 0 ? Number.POSITIVE_INFINITY : resolvedDays * 86_400_000;
};
this.eventsRetentionMs = daysToRetentionMs(retention.eventsDays, DEFAULT_EVENTS_RETENTION_MS, 3650);
this.telemetryRetentionMs = daysToRetentionMs(
retention.telemetryDays,
DEFAULT_TELEMETRY_RETENTION_MS,
3650,
);
this.sessionsRetentionMs = daysToRetentionMs(
retention.sessionsDays,
DEFAULT_SESSIONS_RETENTION_MS,
3650,
);
this.dailyRollupRetentionMs = daysToRetentionMs(
retention.dailyRollupsDays,
DEFAULT_DAILY_ROLLUP_RETENTION_MS,
36500,
);
this.monthlyRollupRetentionMs = daysToRetentionMs(
retention.monthlyRollupsDays,
DEFAULT_MONTHLY_ROLLUP_RETENTION_MS,
36500,
);
this.vacuumIntervalMs = daysToRetentionMs(
retention.vacuumIntervalDays,
DEFAULT_VACUUM_INTERVAL_MS,
3650,
);
this.db = new Database(this.dbPath);
applyPragmas(this.db);
ensureSchema(this.db);
const reconciledSessions = reconcileStaleActiveSessions(this.db);
if (reconciledSessions > 0) {
this.logger.info(
`Recovered stale active sessions on startup: reconciledSessions=${reconciledSessions}`,
);
}
if (shouldBackfillLifetimeSummaries(this.db)) {
const result = rebuildLifetimeSummaryTables(this.db);
if (result.appliedSessions > 0) {
this.logger.info(
`Backfilled lifetime summaries from retained sessions: appliedSessions=${result.appliedSessions}`,
);
}
}
this.preparedStatements = createTrackerPreparedStatements(this.db);
this.scheduleMaintenance();
this.scheduleFlush();
@@ -301,6 +326,11 @@ export class ImmersionTrackerService {
activeSessions: number;
episodesToday: number;
activeAnimeCount: number;
totalEpisodesWatched: number;
totalAnimeCompleted: number;
totalActiveMin: number;
totalCards: number;
activeDays: number;
}> {
return getQueryHints(this.db);
}
@@ -323,6 +353,12 @@ export class ImmersionTrackerService {
});
}
// Rebuild the lifetime summary tables from scratch. Pending telemetry and
// queued writes are flushed first so the rebuild reads fully persisted
// session state; the returned summary describes what was rebuilt.
async rebuildLifetimeSummaries(): Promise<LifetimeRebuildSummary> {
this.flushTelemetry(true);
this.flushNow();
return rebuildLifetimeSummaryTables(this.db);
}
// Return up to `limit` aggregated kanji rows (thin delegate to the query module).
async getKanjiStats(limit = 100): Promise<KanjiStatsRow[]> {
return getKanjiStats(this.db, limit);
}
@@ -454,34 +490,21 @@ export class ImmersionTrackerService {
let coverBlob: Buffer | null = null;
try {
const res = await fetch(info.coverUrl);
if (res.ok) coverBlob = Buffer.from(await res.arrayBuffer());
if (res.ok) {
coverBlob = Buffer.from(await res.arrayBuffer());
}
} catch {
/* ignore */
}
for (const v of videos) {
this.db
.prepare(
`
INSERT INTO imm_media_art (video_id, anilist_id, cover_url, cover_blob, title_romaji, title_english, episodes_total, fetched_at_ms, CREATED_DATE, LAST_UPDATE_DATE)
VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
ON CONFLICT(video_id) DO UPDATE SET
anilist_id = excluded.anilist_id, cover_url = excluded.cover_url, cover_blob = COALESCE(excluded.cover_blob, cover_blob),
title_romaji = excluded.title_romaji, title_english = excluded.title_english, episodes_total = excluded.episodes_total,
fetched_at_ms = excluded.fetched_at_ms, LAST_UPDATE_DATE = excluded.LAST_UPDATE_DATE
`,
)
.run(
v.video_id,
info.anilistId,
info.coverUrl,
coverBlob,
info.titleRomaji ?? null,
info.titleEnglish ?? null,
info.episodesTotal ?? null,
Date.now(),
Date.now(),
Date.now(),
);
upsertCoverArt(this.db, v.video_id, {
anilistId: info.anilistId,
coverUrl: info.coverUrl,
coverBlob,
titleRomaji: info.titleRomaji ?? null,
titleEnglish: info.titleEnglish ?? null,
episodesTotal: info.episodesTotal ?? null,
});
}
}
}
@@ -539,7 +562,7 @@ export class ImmersionTrackerService {
}
async ensureCoverArt(videoId: number): Promise<boolean> {
const existing = getCoverArt(this.db, videoId);
const existing = await this.getCoverArt(videoId);
if (existing?.coverBlob) {
return true;
}
@@ -557,7 +580,11 @@ export class ImmersionTrackerService {
if (!canonicalTitle) {
return false;
}
return await this.coverArtFetcher!.fetchIfMissing(this.db, videoId, canonicalTitle);
const fetched = await this.coverArtFetcher!.fetchIfMissing(this.db, videoId, canonicalTitle);
if (!fetched) {
return false;
}
return (await this.getCoverArt(videoId))?.coverBlob !== null;
})();
this.pendingCoverFetches.set(videoId, fetchPromise);
@@ -729,7 +756,6 @@ export class ImmersionTrackerService {
payloadJson: sanitizePayload(
{
event: 'subtitle-line',
text: cleaned,
words: metrics.words,
},
this.maxPayloadBytes,
@@ -1024,17 +1050,33 @@ export class ImmersionTrackerService {
this.flushTelemetry(true);
this.flushNow();
const nowMs = Date.now();
const retentionResult = pruneRetention(this.db, nowMs, {
eventsRetentionMs: this.eventsRetentionMs,
telemetryRetentionMs: this.telemetryRetentionMs,
dailyRollupRetentionMs: this.dailyRollupRetentionMs,
monthlyRollupRetentionMs: this.monthlyRollupRetentionMs,
});
const shouldRebuildRollups =
retentionResult.deletedTelemetryRows > 0 || retentionResult.deletedEndedSessions > 0;
this.runRollupMaintenance(shouldRebuildRollups);
this.runRollupMaintenance(false);
if (
Number.isFinite(this.eventsRetentionMs) ||
Number.isFinite(this.telemetryRetentionMs) ||
Number.isFinite(this.sessionsRetentionMs)
) {
pruneRawRetention(this.db, nowMs, {
eventsRetentionMs: this.eventsRetentionMs,
telemetryRetentionMs: this.telemetryRetentionMs,
sessionsRetentionMs: this.sessionsRetentionMs,
});
}
if (
Number.isFinite(this.dailyRollupRetentionMs) ||
Number.isFinite(this.monthlyRollupRetentionMs)
) {
pruneRollupRetention(this.db, nowMs, {
dailyRollupRetentionMs: this.dailyRollupRetentionMs,
monthlyRollupRetentionMs: this.monthlyRollupRetentionMs,
});
}
if (nowMs - this.lastVacuumMs >= this.vacuumIntervalMs && !this.writeLock.locked) {
if (
this.vacuumIntervalMs > 0 &&
nowMs - this.lastVacuumMs >= this.vacuumIntervalMs &&
!this.writeLock.locked
) {
this.db.exec('VACUUM');
this.lastVacuumMs = nowMs;
}
@@ -1097,6 +1139,7 @@ export class ImmersionTrackerService {
this.sessionState.pendingTelemetry = false;
finalizeSessionRecord(this.db, this.sessionState, endedAt);
applySessionLifetimeSummary(this.db, this.sessionState, endedAt);
this.sessionState = null;
}

View File

@@ -13,17 +13,26 @@ import {
} from '../storage.js';
import { startSessionRecord } from '../session.js';
import {
getAnimeDailyRollups,
cleanupVocabularyStats,
deleteSession,
getDailyRollups,
getQueryHints,
getMonthlyRollups,
getAnimeDetail,
getAnimeEpisodes,
getAnimeCoverArt,
getAnimeLibrary,
getCoverArt,
getMediaDetail,
getMediaLibrary,
getKanjiOccurrences,
getSessionSummaries,
getVocabularyStats,
getKanjiStats,
getSessionEvents,
getWordOccurrences,
upsertCoverArt,
} from '../query.js';
import { SOURCE_TYPE_LOCAL, EVENT_SUBTITLE_LINE } from '../types.js';
@@ -123,6 +132,85 @@ test('getSessionSummaries returns sessionId and canonicalTitle', () => {
}
});
test('getDailyRollups limits by distinct days (not rows)', () => {
const dbPath = makeDbPath();
const db = new Database(dbPath);
try {
ensureSchema(db);
const insert = db.prepare(
`
INSERT INTO imm_daily_rollups (
rollup_day, video_id, total_sessions, total_active_min, total_lines_seen,
total_words_seen, total_tokens_seen, total_cards
) VALUES (?, ?, ?, ?, ?, ?, ?, ?)
`,
);
// Four rows spread over three distinct days: day 10 (two videos), day 9, day 8.
insert.run(10, 1, 1, 1, 0, 0, 0, 2);
insert.run(10, 2, 1, 1, 0, 0, 0, 3);
insert.run(9, 1, 1, 1, 0, 0, 0, 1);
insert.run(8, 1, 1, 1, 0, 0, 0, 1);
// limit = 2 must mean "two most recent days" (3 rows), not "two rows".
const rows = getDailyRollups(db, 2);
assert.equal(rows.length, 3);
assert.ok(rows.every((r) => r.rollupDayOrMonth === 10 || r.rollupDayOrMonth === 9));
assert.ok(rows.some((r) => r.rollupDayOrMonth === 10 && r.videoId === 1));
assert.ok(rows.some((r) => r.rollupDayOrMonth === 10 && r.videoId === 2));
assert.ok(rows.some((r) => r.rollupDayOrMonth === 9 && r.videoId === 1));
} finally {
db.close();
cleanupDbPath(dbPath);
}
});
// All-time hint totals must come from the imm_lifetime_global singleton row,
// not from re-aggregating daily rollups (rollup rows are seeded as decoys).
test('getQueryHints reads all-time totals from lifetime summary', () => {
const dbPath = makeDbPath();
const db = new Database(dbPath);
try {
ensureSchema(db);
// Seed the singleton lifetime summary (global_id = 1 is created by the schema).
db.prepare(
`
UPDATE imm_lifetime_global
SET
total_sessions = ?,
total_active_ms = ?,
total_cards = ?,
active_days = ?,
episodes_completed = ?,
anime_completed = ?
WHERE global_id = 1
`,
).run(4, 90_000, 2, 9, 11, 22);
// Decoy rollups — if hints summed these, totalActiveMin would be 33, not 1.
const insert = db.prepare(
`
INSERT INTO imm_daily_rollups (
rollup_day, video_id, total_sessions, total_active_min, total_lines_seen,
total_words_seen, total_tokens_seen, total_cards
) VALUES (?, ?, ?, ?, ?, ?, ?, ?)
`,
);
insert.run(10, 1, 1, 12, 0, 0, 0, 2);
insert.run(10, 2, 1, 11, 0, 0, 0, 3);
insert.run(9, 1, 1, 10, 0, 0, 0, 1);
const hints = getQueryHints(db);
assert.equal(hints.totalSessions, 4);
assert.equal(hints.totalCards, 2);
// 90_000 ms of lifetime activity reads back as 1 whole minute.
assert.equal(hints.totalActiveMin, 1);
assert.equal(hints.activeDays, 9);
assert.equal(hints.totalEpisodesWatched, 11);
assert.equal(hints.totalAnimeCompleted, 22);
} finally {
db.close();
cleanupDbPath(dbPath);
}
});
test('getSessionSummaries with no telemetry returns zero aggregates', () => {
const dbPath = makeDbPath();
const db = new Database(dbPath);
@@ -157,6 +245,59 @@ test('getSessionSummaries with no telemetry returns zero aggregates', () => {
}
});
// An ended session with no telemetry rows must be summarized from the
// denormalized counters stored directly on the imm_sessions row.
test('getSessionSummaries uses denormalized session metrics for ended sessions without telemetry', () => {
const dbPath = makeDbPath();
const db = new Database(dbPath);
try {
ensureSchema(db);
const videoId = getOrCreateVideoRecord(db, 'local:/tmp/ended-session-no-telemetry.mkv', {
canonicalTitle: 'Ended Session',
sourcePath: '/tmp/ended-session-no-telemetry.mkv',
sourceUrl: null,
sourceType: SOURCE_TYPE_LOCAL,
});
const startedAtMs = 4_000_000;
const endedAtMs = startedAtMs + 8_000;
const { sessionId } = startSessionRecord(db, videoId, startedAtMs);
// End the session (status = 2 marks ended in these fixtures) and write the
// counters straight onto the session row — no telemetry rows are inserted.
db.prepare(
`
UPDATE imm_sessions
SET
ended_at_ms = ?,
status = 2,
total_watched_ms = ?,
active_watched_ms = ?,
lines_seen = ?,
words_seen = ?,
tokens_seen = ?,
cards_mined = ?,
lookup_count = ?,
lookup_hits = ?,
LAST_UPDATE_DATE = ?
WHERE session_id = ?
`,
).run(endedAtMs, 8_000, 7_000, 12, 34, 34, 5, 9, 6, endedAtMs, sessionId);
const rows = getSessionSummaries(db, 10);
const row = rows.find((r) => r.sessionId === sessionId);
assert.ok(row);
// The summary must surface the session-row counters verbatim.
assert.equal(row.totalWatchedMs, 8_000);
assert.equal(row.activeWatchedMs, 7_000);
assert.equal(row.linesSeen, 12);
assert.equal(row.wordsSeen, 34);
assert.equal(row.tokensSeen, 34);
assert.equal(row.cardsMined, 5);
assert.equal(row.lookupCount, 9);
assert.equal(row.lookupHits, 6);
} finally {
db.close();
cleanupDbPath(dbPath);
}
});
test('getVocabularyStats returns rows ordered by frequency descending', () => {
const dbPath = makeDbPath();
const db = new Database(dbPath);
@@ -328,6 +469,129 @@ test('cleanupVocabularyStats repairs stored POS metadata and removes excluded im
}
});
test('getDailyRollups returns all rows for the most recent rollup days', () => {
const dbPath = makeDbPath();
const db = new Database(dbPath);
try {
ensureSchema(db);
const insertRollup = db.prepare(
`
INSERT INTO imm_daily_rollups (
rollup_day, video_id, total_sessions, total_active_min, total_lines_seen, total_words_seen,
total_tokens_seen, total_cards, cards_per_hour, words_per_min, lookup_hit_rate
) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
`,
);
// Days present: 3_000 (two videos), 2_999, 2_998.
insertRollup.run(3_000, 1, 1, 10, 20, 30, 40, 2, 0.1, 0.2, 0.3);
insertRollup.run(3_000, 2, 2, 10, 20, 30, 40, 3, 0.1, 0.2, 0.3);
insertRollup.run(2_999, 3, 1, 5, 10, 15, 20, 1, 0.1, 0.2, 0.3);
insertRollup.run(2_998, 4, 1, 5, 10, 15, 20, 1, 0.1, 0.2, 0.3);
// limit = 1 day → both rows of day 3_000, ordered newest day first and
// higher videoId first within the day.
const rows = getDailyRollups(db, 1);
assert.equal(rows.length, 2);
assert.equal(rows[0]?.rollupDayOrMonth, 3_000);
assert.equal(rows[0]?.videoId, 2);
assert.equal(rows[1]?.rollupDayOrMonth, 3_000);
assert.equal(rows[1]?.videoId, 1);
// limit = 2 days → adds the single row from day 2_999.
const twoRows = getDailyRollups(db, 2);
assert.equal(twoRows.length, 3);
assert.equal(twoRows[0]?.rollupDayOrMonth, 3_000);
assert.equal(twoRows[1]?.rollupDayOrMonth, 3_000);
assert.equal(twoRows[2]?.rollupDayOrMonth, 2_999);
} finally {
db.close();
cleanupDbPath(dbPath);
}
});
test('getMonthlyRollups returns all rows for the most recent rollup months', () => {
const dbPath = makeDbPath();
const db = new Database(dbPath);
try {
ensureSchema(db);
const insertRollup = db.prepare(
`
INSERT INTO imm_monthly_rollups (
rollup_month, video_id, total_sessions, total_active_min, total_lines_seen, total_words_seen,
total_tokens_seen, total_cards, CREATED_DATE, LAST_UPDATE_DATE
) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
`,
);
const nowMs = Date.now();
// Months present: 202602 (two videos), 202601, 202600.
insertRollup.run(202602, 1, 1, 10, 20, 30, 40, 5, nowMs, nowMs);
insertRollup.run(202602, 2, 1, 10, 20, 30, 40, 6, nowMs, nowMs);
insertRollup.run(202601, 3, 1, 5, 10, 15, 20, 2, nowMs, nowMs);
insertRollup.run(202600, 4, 1, 5, 10, 15, 20, 2, nowMs, nowMs);
// limit = 1 month → both rows of 202602, newest month first, higher
// videoId first within the month.
const rows = getMonthlyRollups(db, 1);
assert.equal(rows.length, 2);
assert.equal(rows[0]?.rollupDayOrMonth, 202602);
assert.equal(rows[0]?.videoId, 2);
assert.equal(rows[1]?.rollupDayOrMonth, 202602);
assert.equal(rows[1]?.videoId, 1);
} finally {
db.close();
cleanupDbPath(dbPath);
}
});
// Anime-scoped rollups: rows are filtered to videos linked to the anime and
// limited by distinct rollup days, mirroring getDailyRollups semantics.
test('getAnimeDailyRollups returns all rows for the most recent rollup days', () => {
const dbPath = makeDbPath();
const db = new Database(dbPath);
try {
ensureSchema(db);
const insertRollup = db.prepare(
`
INSERT INTO imm_daily_rollups (
rollup_day, video_id, total_sessions, total_active_min, total_lines_seen, total_words_seen,
total_tokens_seen, total_cards, cards_per_hour, words_per_min, lookup_hit_rate
) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
`,
);
const animeId = getOrCreateAnimeRecord(db, {
parsedTitle: 'Test Anime',
canonicalTitle: 'Test Anime',
anilistId: null,
titleRomaji: null,
titleEnglish: null,
titleNative: null,
metadataJson: null,
});
const video1 = getOrCreateVideoRecord(db, 'local:/tmp/anime-ep1.mkv', {
canonicalTitle: 'Episode 1',
sourcePath: '/tmp/anime-ep1.mkv',
sourceUrl: null,
sourceType: SOURCE_TYPE_LOCAL,
});
const video2 = getOrCreateVideoRecord(db, 'local:/tmp/anime-ep2.mkv', {
canonicalTitle: 'Episode 2',
sourcePath: '/tmp/anime-ep2.mkv',
sourceUrl: null,
sourceType: SOURCE_TYPE_LOCAL,
});
// Attach both videos to the anime so the anime_id filter has matches.
db.prepare('UPDATE imm_videos SET anime_id = ? WHERE video_id IN (?, ?)').run(
animeId,
video1,
video2,
);
// Two rows on day 4_000 and one on day 3_999.
insertRollup.run(4_000, video1, 1, 10, 20, 30, 40, 2, 0.1, 0.2, 0.3);
insertRollup.run(4_000, video2, 1, 10, 20, 30, 40, 2, 0.1, 0.2, 0.3);
insertRollup.run(3_999, video1, 1, 10, 20, 30, 40, 2, 0.1, 0.2, 0.3);
// limit = 1 day → both rows from day 4_000 only.
const rows = getAnimeDailyRollups(db, animeId, 1);
assert.equal(rows.length, 2);
assert.equal(rows[0]?.rollupDayOrMonth, 4_000);
assert.equal(rows[0]?.videoId, video2);
assert.equal(rows[1]?.rollupDayOrMonth, 4_000);
assert.equal(rows[1]?.videoId, video1);
} finally {
db.close();
cleanupDbPath(dbPath);
}
});
test('cleanupVocabularyStats merges repaired duplicates instead of violating the imm_words unique key', async () => {
const dbPath = makeDbPath();
const db = new Database(dbPath);
@@ -423,6 +687,8 @@ test('cleanupVocabularyStats merges repaired duplicates instead of violating the
{
animeId: null,
animeTitle: null,
sourcePath: '/tmp/cleanup-merge.mkv',
secondaryText: null,
videoId,
videoTitle: 'Cleanup Merge',
sessionId,
@@ -843,6 +1109,46 @@ test('anime-level queries group by anime_id and preserve episode-level rows', ()
1_031_000,
);
const now = Date.now();
db.prepare(
`
INSERT INTO imm_lifetime_anime (
anime_id,
total_sessions,
total_active_ms,
total_cards,
total_words_seen,
total_lines_seen,
total_tokens_seen,
episodes_started,
episodes_completed,
first_watched_ms,
last_watched_ms,
CREATED_DATE,
LAST_UPDATE_DATE
) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
`,
).run(lwaAnimeId, 3, 12_000, 6, 80, 33, 0, 2, 1, 1_000_000, 1_021_000, now, now);
db.prepare(
`
INSERT INTO imm_lifetime_anime (
anime_id,
total_sessions,
total_active_ms,
total_cards,
total_words_seen,
total_lines_seen,
total_tokens_seen,
episodes_started,
episodes_completed,
first_watched_ms,
last_watched_ms,
CREATED_DATE,
LAST_UPDATE_DATE
) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
`,
).run(frierenAnimeId, 1, 3_500, 1, 20, 8, 0, 1, 1, 1_030_000, 1_030_000, now, now);
const animeLibrary = getAnimeLibrary(db);
assert.equal(animeLibrary.length, 2);
assert.deepEqual(
@@ -923,6 +1229,464 @@ test('anime-level queries group by anime_id and preserve episode-level rows', ()
}
});
// After retention pruning no imm_sessions rows may remain; library and
// detail views must still report totals from the imm_lifetime_anime row.
test('anime library and detail still return lifetime rows without retained sessions', () => {
const dbPath = makeDbPath();
const db = new Database(dbPath);
try {
ensureSchema(db);
const animeId = getOrCreateAnimeRecord(db, {
parsedTitle: 'No Session Anime',
canonicalTitle: 'No Session Anime',
anilistId: 111_111,
titleRomaji: 'No Session Anime',
titleEnglish: 'No Session Anime',
titleNative: 'No Session Anime',
metadataJson: null,
});
const ep1 = getOrCreateVideoRecord(db, 'local:/tmp/no-session-ep1.mkv', {
canonicalTitle: 'Episode 1',
sourcePath: '/tmp/no-session-ep1.mkv',
sourceUrl: null,
sourceType: SOURCE_TYPE_LOCAL,
});
const ep2 = getOrCreateVideoRecord(db, 'local:/tmp/no-session-ep2.mkv', {
canonicalTitle: 'Episode 2',
sourcePath: '/tmp/no-session-ep2.mkv',
sourceUrl: null,
sourceType: SOURCE_TYPE_LOCAL,
});
linkVideoToAnimeRecord(db, ep1, {
animeId,
parsedBasename: 'Episode 1',
parsedTitle: 'No Session Anime',
parsedSeason: 1,
parsedEpisode: 1,
parserSource: 'fallback',
parserConfidence: 1,
parseMetadataJson: '{"episode":1}',
});
linkVideoToAnimeRecord(db, ep2, {
animeId,
parsedBasename: 'Episode 2',
parsedTitle: 'No Session Anime',
parsedSeason: 1,
parsedEpisode: 2,
parserSource: 'fallback',
parserConfidence: 1,
parseMetadataJson: '{"episode":2}',
});
const now = Date.now();
// Seed the lifetime row directly; deliberately no imm_sessions rows exist.
db.prepare(
`
INSERT INTO imm_lifetime_anime (
anime_id,
total_sessions,
total_active_ms,
total_cards,
total_words_seen,
total_lines_seen,
total_tokens_seen,
episodes_started,
episodes_completed,
first_watched_ms,
last_watched_ms,
CREATED_DATE,
LAST_UPDATE_DATE
) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
`,
).run(animeId, 12, 4_500, 9, 200, 80, 15, 2, 2, 1_000_000, now, now, now);
const library = getAnimeLibrary(db);
assert.equal(library.length, 1);
assert.equal(library[0]?.animeId, animeId);
assert.equal(library[0]?.canonicalTitle, 'No Session Anime');
assert.equal(library[0]?.totalSessions, 12);
assert.equal(library[0]?.totalActiveMs, 4_500);
assert.equal(library[0]?.totalCards, 9);
assert.equal(library[0]?.episodeCount, 2);
const detail = getAnimeDetail(db, animeId);
assert.ok(detail);
assert.equal(detail?.animeId, animeId);
assert.equal(detail?.canonicalTitle, 'No Session Anime');
assert.equal(detail?.totalSessions, 12);
assert.equal(detail?.totalActiveMs, 4_500);
assert.equal(detail?.totalCards, 9);
assert.equal(detail?.totalWordsSeen, 200);
assert.equal(detail?.totalLinesSeen, 80);
assert.equal(detail?.episodeCount, 2);
// Lookup counters live on session rows; with none retained they read as 0.
assert.equal(detail?.totalLookupCount, 0);
assert.equal(detail?.totalLookupHits, 0);
} finally {
db.close();
cleanupDbPath(dbPath);
}
});
// Media library and detail views must read their aggregates from
// imm_lifetime_media rows (no sessions/telemetry are created here), and the
// library must be ordered newest-first by last_watched_ms.
test('media library and detail queries read lifetime totals', () => {
  const dbPath = makeDbPath();
  const db = new Database(dbPath);
  try {
    ensureSchema(db);
    const mediaOne = getOrCreateVideoRecord(db, 'local:/tmp/media-one.mkv', {
      canonicalTitle: 'Media One',
      sourcePath: '/tmp/media-one.mkv',
      sourceUrl: null,
      sourceType: SOURCE_TYPE_LOCAL,
    });
    const mediaTwo = getOrCreateVideoRecord(db, 'local:/tmp/media-two.mkv', {
      canonicalTitle: 'Media Two',
      sourcePath: '/tmp/media-two.mkv',
      sourceUrl: null,
      sourceType: SOURCE_TYPE_LOCAL,
    });
    const insertLifetime = db.prepare(
      `
      INSERT INTO imm_lifetime_media (
        video_id,
        total_sessions,
        total_active_ms,
        total_cards,
        total_words_seen,
        total_lines_seen,
        total_tokens_seen,
        completed,
        first_watched_ms,
        last_watched_ms,
        CREATED_DATE,
        LAST_UPDATE_DATE
      ) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
      `,
    );
    const now = Date.now();
    const older = now - 10_000;
    // Fix: the statement has 12 placeholders, but the first run() previously
    // passed 13 values (an extra trailing `now`), which raises a bind-range
    // error in SQLite drivers. Exactly 12 values are bound for each row.
    insertLifetime.run(mediaOne, 3, 12_000, 4, 180, 10, 20, 1, 1_000, now, now, now);
    insertLifetime.run(mediaTwo, 1, 2_000, 2, 40, 4, 6, 0, 900, older, now, now);
    const library = getMediaLibrary(db);
    assert.equal(library.length, 2);
    // mediaOne (last watched `now`) must sort before mediaTwo (`older`).
    assert.deepEqual(
      library.map((row) => ({
        videoId: row.videoId,
        canonicalTitle: row.canonicalTitle,
        totalSessions: row.totalSessions,
        totalActiveMs: row.totalActiveMs,
        totalCards: row.totalCards,
        totalWordsSeen: row.totalWordsSeen,
        lastWatchedMs: row.lastWatchedMs,
        hasCoverArt: row.hasCoverArt,
      })),
      [
        {
          videoId: mediaOne,
          canonicalTitle: 'Media One',
          totalSessions: 3,
          totalActiveMs: 12_000,
          totalCards: 4,
          totalWordsSeen: 180,
          lastWatchedMs: now,
          hasCoverArt: 0,
        },
        {
          videoId: mediaTwo,
          canonicalTitle: 'Media Two',
          totalSessions: 1,
          totalActiveMs: 2_000,
          totalCards: 2,
          totalWordsSeen: 40,
          lastWatchedMs: older,
          hasCoverArt: 0,
        },
      ],
    );
    // Detail view reads the same lifetime row for a single video.
    const detail = getMediaDetail(db, mediaOne);
    assert.ok(detail);
    assert.equal(detail.totalSessions, 3);
    assert.equal(detail.totalActiveMs, 12_000);
    assert.equal(detail.totalCards, 4);
    assert.equal(detail.totalWordsSeen, 180);
    assert.equal(detail.totalLinesSeen, 10);
  } finally {
    db.close();
    cleanupDbPath(dbPath);
  }
});
// Two episodes of one anime are upserted with the SAME cover_url but
// different blobs; cover-art reads must resolve both to a single shared
// 4-byte blob, and the library must flag both videos as having art.
test('cover art queries reuse a shared blob across duplicate anime art rows', () => {
const dbPath = makeDbPath();
const db = new Database(dbPath);
try {
ensureSchema(db);
const animeId = getOrCreateAnimeRecord(db, {
parsedTitle: 'Shared Blob Anime',
canonicalTitle: 'Shared Blob Anime',
anilistId: 42_424,
titleRomaji: 'Shared Blob Anime',
titleEnglish: 'Shared Blob Anime',
titleNative: null,
metadataJson: null,
});
const videoOne = getOrCreateVideoRecord(db, 'local:/tmp/shared-blob-1.mkv', {
canonicalTitle: 'Shared Blob 1',
sourcePath: '/tmp/shared-blob-1.mkv',
sourceUrl: null,
sourceType: SOURCE_TYPE_LOCAL,
});
const videoTwo = getOrCreateVideoRecord(db, 'local:/tmp/shared-blob-2.mkv', {
canonicalTitle: 'Shared Blob 2',
sourcePath: '/tmp/shared-blob-2.mkv',
sourceUrl: null,
sourceType: SOURCE_TYPE_LOCAL,
});
linkVideoToAnimeRecord(db, videoOne, {
animeId,
parsedBasename: 'Shared Blob 1',
parsedTitle: 'Shared Blob Anime',
parsedSeason: 1,
parsedEpisode: 1,
parserSource: 'fallback',
parserConfidence: 1,
parseMetadataJson: null,
});
linkVideoToAnimeRecord(db, videoTwo, {
animeId,
parsedBasename: 'Shared Blob 2',
parsedTitle: 'Shared Blob Anime',
parsedSeason: 1,
parsedEpisode: 2,
parserSource: 'fallback',
parserConfidence: 1,
parseMetadataJson: null,
});
const now = Date.now();
// Lifetime rows drive getMediaLibrary ordering/inclusion for both videos.
db.prepare(
`
INSERT INTO imm_lifetime_media (
video_id,
total_sessions,
total_active_ms,
total_cards,
total_words_seen,
total_lines_seen,
total_tokens_seen,
completed,
first_watched_ms,
last_watched_ms,
CREATED_DATE,
LAST_UPDATE_DATE
) VALUES (?, 1, 1000, 0, 0, 0, 0, 0, ?, ?, ?, ?)
`,
).run(videoOne, now, now, now, now);
db.prepare(
`
INSERT INTO imm_lifetime_media (
video_id,
total_sessions,
total_active_ms,
total_cards,
total_words_seen,
total_lines_seen,
total_tokens_seen,
completed,
first_watched_ms,
last_watched_ms,
CREATED_DATE,
LAST_UPDATE_DATE
) VALUES (?, 1, 1000, 0, 0, 0, 0, 0, ?, ?, ?, ?)
`,
).run(videoTwo, now, now - 1, now, now);
// Same anilistId + coverUrl, but deliberately different blob bytes.
upsertCoverArt(db, videoOne, {
anilistId: 42_424,
coverUrl: 'https://images.test/shared.jpg',
coverBlob: Buffer.from([1, 2, 3, 4]),
titleRomaji: 'Shared Blob Anime',
titleEnglish: 'Shared Blob Anime',
episodesTotal: 12,
});
upsertCoverArt(db, videoTwo, {
anilistId: 42_424,
coverUrl: 'https://images.test/shared.jpg',
coverBlob: Buffer.from([9, 9, 9, 9]),
titleRomaji: 'Shared Blob Anime',
titleEnglish: 'Shared Blob Anime',
episodesTotal: 12,
});
const artOne = getCoverArt(db, videoOne);
const artTwo = getCoverArt(db, videoTwo);
const animeArt = getAnimeCoverArt(db, animeId);
const library = getMediaLibrary(db);
// All reads resolve to one shared blob: per-video art is byte-identical,
// and the anime-level art is the same 4-byte payload.
assert.equal(artOne?.coverBlob?.length, 4);
assert.equal(artTwo?.coverBlob?.length, 4);
assert.deepEqual(artOne?.coverBlob, artTwo?.coverBlob);
assert.equal(animeArt?.coverBlob?.length, 4);
assert.deepEqual(
library.map((row) => ({
videoId: row.videoId,
hasCoverArt: row.hasCoverArt,
})),
[
{ videoId: videoOne, hasCoverArt: 1 },
{ videoId: videoTwo, hasCoverArt: 1 },
],
);
} finally {
db.close();
cleanupDbPath(dbPath);
}
});
// Detail/episode queries mix two data sources: lifetime rows where present
// (anime row, episodeOne media row) and denormalized ended-session counters
// where not (lookup totals everywhere; all metrics for episodeTwo, which has
// no lifetime row). No telemetry rows are created at all.
test('anime/media detail and episode queries use ended-session metrics when telemetry rows are absent', () => {
const dbPath = makeDbPath();
const db = new Database(dbPath);
try {
ensureSchema(db);
const animeId = getOrCreateAnimeRecord(db, {
parsedTitle: 'Session Metrics Anime',
canonicalTitle: 'Session Metrics Anime',
anilistId: 999_001,
titleRomaji: 'Session Metrics Anime',
titleEnglish: 'Session Metrics Anime',
titleNative: 'Session Metrics Anime',
metadataJson: null,
});
const episodeOne = getOrCreateVideoRecord(db, 'local:/tmp/session-metrics-ep1.mkv', {
canonicalTitle: 'Episode 1',
sourcePath: '/tmp/session-metrics-ep1.mkv',
sourceUrl: null,
sourceType: SOURCE_TYPE_LOCAL,
});
const episodeTwo = getOrCreateVideoRecord(db, 'local:/tmp/session-metrics-ep2.mkv', {
canonicalTitle: 'Episode 2',
sourcePath: '/tmp/session-metrics-ep2.mkv',
sourceUrl: null,
sourceType: SOURCE_TYPE_LOCAL,
});
linkVideoToAnimeRecord(db, episodeOne, {
animeId,
parsedBasename: 'session-metrics-ep1.mkv',
parsedTitle: 'Session Metrics Anime',
parsedSeason: 1,
parsedEpisode: 1,
parserSource: 'fallback',
parserConfidence: 1,
parseMetadataJson: '{"episode":1}',
});
linkVideoToAnimeRecord(db, episodeTwo, {
animeId,
parsedBasename: 'session-metrics-ep2.mkv',
parsedTitle: 'Session Metrics Anime',
parsedSeason: 1,
parsedEpisode: 2,
parserSource: 'fallback',
parserConfidence: 1,
parseMetadataJson: '{"episode":2}',
});
const now = Date.now();
// Lifetime rows: one for the anime, one for episodeOne only.
db.prepare(
`
INSERT INTO imm_lifetime_anime (
anime_id, total_sessions, total_active_ms, total_cards, total_words_seen, total_lines_seen,
total_tokens_seen, episodes_started, episodes_completed, first_watched_ms, last_watched_ms,
CREATED_DATE, LAST_UPDATE_DATE
) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
`,
).run(animeId, 3, 12_000, 6, 60, 24, 60, 2, 2, 1_000_000, 1_020_000, now, now);
db.prepare(
`
INSERT INTO imm_lifetime_media (
video_id, total_sessions, total_active_ms, total_cards, total_words_seen, total_lines_seen,
total_tokens_seen, completed, first_watched_ms, last_watched_ms, CREATED_DATE, LAST_UPDATE_DATE
) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
`,
).run(episodeOne, 2, 7_000, 3, 30, 12, 30, 1, 1_000_000, 1_010_000, now, now);
// Three ended sessions (status 2) carrying denormalized counters:
// two on episodeOne, one on episodeTwo.
const s1 = startSessionRecord(db, episodeOne, 1_000_000).sessionId;
const s2 = startSessionRecord(db, episodeOne, 1_010_000).sessionId;
const s3 = startSessionRecord(db, episodeTwo, 1_020_000).sessionId;
const updateSession = db.prepare(
`
UPDATE imm_sessions
SET
ended_at_ms = ?,
status = 2,
active_watched_ms = ?,
cards_mined = ?,
words_seen = ?,
lookup_count = ?,
lookup_hits = ?,
LAST_UPDATE_DATE = ?
WHERE session_id = ?
`,
);
updateSession.run(1_001_000, 3_000, 1, 10, 4, 3, now, s1);
updateSession.run(1_011_000, 4_000, 2, 20, 5, 4, now, s2);
updateSession.run(1_021_000, 5_000, 3, 30, 6, 5, now, s3);
const animeDetail = getAnimeDetail(db, animeId);
assert.ok(animeDetail);
// Lookup totals are summed from the session rows: 4+5+6 and 3+4+5.
assert.equal(animeDetail?.totalLookupCount, 15);
assert.equal(animeDetail?.totalLookupHits, 12);
const episodes = getAnimeEpisodes(db, animeId);
// episodeOne reports its lifetime-row totals; episodeTwo (no lifetime row)
// falls back to its single ended session's counters.
assert.deepEqual(
episodes.map((row) => ({
videoId: row.videoId,
totalSessions: row.totalSessions,
totalActiveMs: row.totalActiveMs,
totalCards: row.totalCards,
totalWordsSeen: row.totalWordsSeen,
})),
[
{
videoId: episodeOne,
totalSessions: 2,
totalActiveMs: 7_000,
totalCards: 3,
totalWordsSeen: 30,
},
{
videoId: episodeTwo,
totalSessions: 1,
totalActiveMs: 5_000,
totalCards: 3,
totalWordsSeen: 30,
},
],
);
const mediaDetail = getMediaDetail(db, episodeOne);
assert.ok(mediaDetail);
assert.equal(mediaDetail?.totalSessions, 2);
assert.equal(mediaDetail?.totalActiveMs, 7_000);
assert.equal(mediaDetail?.totalCards, 3);
assert.equal(mediaDetail?.totalWordsSeen, 30);
// episodeOne lookup totals come from sessions s1 + s2.
assert.equal(mediaDetail?.totalLookupCount, 9);
assert.equal(mediaDetail?.totalLookupHits, 7);
} finally {
db.close();
cleanupDbPath(dbPath);
}
});
test('getWordOccurrences maps a normalized word back to anime, video, and subtitle line context', () => {
const dbPath = makeDbPath();
const db = new Database(dbPath);
@@ -980,6 +1744,8 @@ test('getWordOccurrences maps a normalized word back to anime, video, and subtit
{
animeId,
animeTitle: 'Little Witch Academia',
sourcePath: '/tmp/Little Witch Academia S02E04.mkv',
secondaryText: null,
videoId,
videoTitle: 'Episode 4',
sessionId,
@@ -1053,6 +1819,8 @@ test('getKanjiOccurrences maps a kanji back to anime, video, and subtitle line c
{
animeId,
animeTitle: 'Frieren',
sourcePath: '/tmp/[SubsPlease] Frieren - 03 - Departure.mkv',
secondaryText: null,
videoId,
videoTitle: 'Episode 3',
sessionId,

View File

@@ -0,0 +1,582 @@
import type { DatabaseSync } from './sqlite';
import { finalizeSessionRecord } from './session';
import type { LifetimeRebuildSummary, SessionState } from './types';
// Aggregated telemetry counters read back for a session; each column may be
// NULL when no telemetry rows were recorded.
interface TelemetryRow {
active_watched_ms: number | null;
cards_mined: number | null;
lines_seen: number | null;
tokens_seen: number | null;
words_seen: number | null;
}
// Minimal video projection used while rebuilding lifetime summaries.
// NOTE(review): `watched` looks like a 0/1 flag — confirm against the schema.
interface VideoRow {
anime_id: number | null;
watched: number;
}
// Minimal anime projection; episodes_total is NULL when unknown.
interface AnimeRow {
episodes_total: number | null;
}
/**
 * Coerce a nullable counter into a non-negative integer.
 * Returns `fallback` when the value is null or not a finite number;
 * otherwise floors the value and clamps it at zero.
 */
function asPositiveNumber(value: number | null, fallback: number): number {
  if (value !== null && Number.isFinite(value)) {
    const floored = Math.floor(value);
    return floored > 0 ? floored : 0;
  }
  return fallback;
}
// Generic COUNT(*) projection for existence checks.
interface ExistenceRow {
count: number;
}
// Current completion flag on an imm_lifetime_media row.
interface LifetimeMediaStateRow {
completed: number;
}
// Current completed-episode counter on an imm_lifetime_anime row.
interface LifetimeAnimeStateRow {
episodes_completed: number;
}
// A fully-ended session still present in imm_sessions, carrying all the
// denormalized counters needed to replay it into the lifetime summaries.
interface RetainedSessionRow {
sessionId: number;
videoId: number;
startedAtMs: number;
endedAtMs: number;
totalWatchedMs: number;
activeWatchedMs: number;
linesSeen: number;
wordsSeen: number;
tokensSeen: number;
cardsMined: number;
lookupCount: number;
lookupHits: number;
pauseCount: number;
pauseMs: number;
seekForwardCount: number;
seekBackwardCount: number;
mediaBufferEvents: number;
}
/**
 * True when imm_sessions still retains a session on the same video that
 * strictly precedes the given one: an earlier started_at_ms, or the same
 * started_at_ms with a lower session_id (the id breaks start-time ties).
 */
function hasRetainedPriorSession(
  db: DatabaseSync,
  videoId: number,
  startedAtMs: number,
  currentSessionId: number,
): boolean {
  const stmt = db.prepare(
    `
SELECT COUNT(*) AS count
FROM imm_sessions
WHERE video_id = ?
AND (
started_at_ms < ?
OR (started_at_ms = ? AND session_id < ?)
)
`,
  );
  const row = stmt.get(videoId, startedAtMs, startedAtMs, currentSessionId) as ExistenceRow | null;
  // A missing row is treated as zero prior sessions.
  const priorCount = Number(row?.count ?? 0);
  return priorCount > 0;
}
/**
 * Returns true when no retained session started earlier on the same LOCAL
 * calendar day as `startedAtMs` (ties broken by session id) — i.e. this
 * session is the one that should bump the active-days counter.
 *
 * The SQL buckets both timestamps into day numbers by converting the epoch-ms
 * value to local wall-clock time and integer-dividing by 86400 seconds.
 * NOTE(review): this relies on SQLite's 'localtime' modifier, so the bucket
 * boundary follows the machine's timezone at query time — confirm that is the
 * intended behavior across DST changes.
 */
function isFirstSessionForLocalDay(
  db: DatabaseSync,
  currentSessionId: number,
  startedAtMs: number,
): boolean {
  return (
    (
      db
        .prepare(
          `
      SELECT COUNT(*) AS count
      FROM imm_sessions
      WHERE CAST(strftime('%s', started_at_ms / 1000, 'unixepoch', 'localtime') AS INTEGER) / 86400
        = CAST(strftime('%s', ? / 1000, 'unixepoch', 'localtime') AS INTEGER) / 86400
      AND (
        started_at_ms < ?
        OR (started_at_ms = ? AND session_id < ?)
      )
      `,
        )
        .get(startedAtMs, startedAtMs, startedAtMs, currentSessionId) as ExistenceRow | null
    )?.count === 0
  );
}
/**
 * Clears every derived lifetime table and zeroes the singleton global row so
 * a rebuild can replay sessions from scratch. `nowMs` is stamped as both the
 * rebuild time and the global row's last-update time.
 */
function resetLifetimeSummaries(db: DatabaseSync, nowMs: number): void {
  // Per-entity summaries and the applied-session ledger are fully re-derived,
  // so they can simply be emptied.
  db.exec(`
    DELETE FROM imm_lifetime_anime;
    DELETE FROM imm_lifetime_media;
    DELETE FROM imm_lifetime_applied_sessions;
  `);
  // The global row (global_id = 1) is kept and its counters reset in place.
  db.prepare(
    `
    UPDATE imm_lifetime_global
    SET
      total_sessions = 0,
      total_active_ms = 0,
      total_cards = 0,
      active_days = 0,
      episodes_started = 0,
      episodes_completed = 0,
      anime_completed = 0,
      last_rebuilt_ms = ?,
      LAST_UPDATE_DATE = ?
    WHERE global_id = 1
    `,
  ).run(nowMs, nowMs);
}
/**
 * Converts a retained session row into the in-memory SessionState shape used
 * by the lifetime-summary apply path. All counters are clamped to be >= 0;
 * live-playback cursor fields (pause/media position, pending flags) are reset
 * because they are irrelevant when replaying historical sessions.
 */
function toRebuildSessionState(row: RetainedSessionRow): SessionState {
  const nonNegative = (n: number): number => Math.max(0, n);
  return {
    sessionId: row.sessionId,
    videoId: row.videoId,
    startedAtMs: row.startedAtMs,
    currentLineIndex: 0,
    lastWallClockMs: row.endedAtMs,
    lastMediaMs: null,
    lastPauseStartMs: null,
    isPaused: false,
    pendingTelemetry: false,
    markedWatched: false,
    totalWatchedMs: nonNegative(row.totalWatchedMs),
    activeWatchedMs: nonNegative(row.activeWatchedMs),
    linesSeen: nonNegative(row.linesSeen),
    wordsSeen: nonNegative(row.wordsSeen),
    tokensSeen: nonNegative(row.tokensSeen),
    cardsMined: nonNegative(row.cardsMined),
    lookupCount: nonNegative(row.lookupCount),
    lookupHits: nonNegative(row.lookupHits),
    pauseCount: nonNegative(row.pauseCount),
    pauseMs: nonNegative(row.pauseMs),
    seekForwardCount: nonNegative(row.seekForwardCount),
    seekBackwardCount: nonNegative(row.seekBackwardCount),
    mediaBufferEvents: nonNegative(row.mediaBufferEvents),
  };
}
/**
 * Loads every session that was never closed (ended_at_ms IS NULL), pairing
 * each with its most recent telemetry sample. Telemetry values take priority
 * over the counters stored on the session row; the sample time doubles as the
 * effective end time, falling back to LAST_UPDATE_DATE then started_at_ms.
 * Results come back in chronological apply order (start time, then id).
 */
function getRetainedStaleActiveSessions(db: DatabaseSync): RetainedSessionRow[] {
  return db
    .prepare(
      `
      SELECT
        s.session_id AS sessionId,
        s.video_id AS videoId,
        s.started_at_ms AS startedAtMs,
        COALESCE(t.sample_ms, s.LAST_UPDATE_DATE, s.started_at_ms) AS endedAtMs,
        COALESCE(t.total_watched_ms, s.total_watched_ms, 0) AS totalWatchedMs,
        COALESCE(t.active_watched_ms, s.active_watched_ms, 0) AS activeWatchedMs,
        COALESCE(t.lines_seen, s.lines_seen, 0) AS linesSeen,
        COALESCE(t.words_seen, s.words_seen, 0) AS wordsSeen,
        COALESCE(t.tokens_seen, s.tokens_seen, 0) AS tokensSeen,
        COALESCE(t.cards_mined, s.cards_mined, 0) AS cardsMined,
        COALESCE(t.lookup_count, s.lookup_count, 0) AS lookupCount,
        COALESCE(t.lookup_hits, s.lookup_hits, 0) AS lookupHits,
        COALESCE(t.pause_count, s.pause_count, 0) AS pauseCount,
        COALESCE(t.pause_ms, s.pause_ms, 0) AS pauseMs,
        COALESCE(t.seek_forward_count, s.seek_forward_count, 0) AS seekForwardCount,
        COALESCE(t.seek_backward_count, s.seek_backward_count, 0) AS seekBackwardCount,
        COALESCE(t.media_buffer_events, s.media_buffer_events, 0) AS mediaBufferEvents
      FROM imm_sessions s
      LEFT JOIN imm_session_telemetry t
        ON t.telemetry_id = (
          SELECT telemetry_id
          FROM imm_session_telemetry
          WHERE session_id = s.session_id
          ORDER BY sample_ms DESC, telemetry_id DESC
          LIMIT 1
        )
      WHERE s.ended_at_ms IS NULL
      ORDER BY s.started_at_ms ASC, s.session_id ASC
      `,
    )
    .all() as RetainedSessionRow[];
}
/**
 * Inserts or accumulates the per-video lifetime summary row for one session.
 *
 * On conflict the counters are added onto the existing row, `completed` is
 * MAX-merged (a video never flips back to incomplete), and the first/last
 * watched timestamps are widened to cover the new session's span, treating
 * NULL as "unknown" rather than zero.
 *
 * @param completed 1 when the video counts as watched for this session, else 0.
 * @param startedAtMs / endedAtMs Session span used for first/last watched times.
 */
function upsertLifetimeMedia(
  db: DatabaseSync,
  videoId: number,
  nowMs: number,
  activeMs: number,
  cardsMined: number,
  wordsSeen: number,
  linesSeen: number,
  tokensSeen: number,
  completed: number,
  startedAtMs: number,
  endedAtMs: number,
): void {
  db.prepare(
    `
    INSERT INTO imm_lifetime_media(
      video_id,
      total_sessions,
      total_active_ms,
      total_cards,
      total_words_seen,
      total_lines_seen,
      total_tokens_seen,
      completed,
      first_watched_ms,
      last_watched_ms,
      CREATED_DATE,
      LAST_UPDATE_DATE
    )
    VALUES (?, 1, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
    ON CONFLICT(video_id) DO UPDATE SET
      total_sessions = total_sessions + 1,
      total_active_ms = total_active_ms + excluded.total_active_ms,
      total_cards = total_cards + excluded.total_cards,
      total_words_seen = total_words_seen + excluded.total_words_seen,
      total_lines_seen = total_lines_seen + excluded.total_lines_seen,
      total_tokens_seen = total_tokens_seen + excluded.total_tokens_seen,
      completed = MAX(completed, excluded.completed),
      first_watched_ms = CASE
        WHEN excluded.first_watched_ms IS NULL THEN first_watched_ms
        WHEN first_watched_ms IS NULL THEN excluded.first_watched_ms
        WHEN excluded.first_watched_ms < first_watched_ms THEN excluded.first_watched_ms
        ELSE first_watched_ms
      END,
      last_watched_ms = CASE
        WHEN excluded.last_watched_ms IS NULL THEN last_watched_ms
        WHEN last_watched_ms IS NULL THEN excluded.last_watched_ms
        WHEN excluded.last_watched_ms > last_watched_ms THEN excluded.last_watched_ms
        ELSE last_watched_ms
      END,
      LAST_UPDATE_DATE = excluded.LAST_UPDATE_DATE
    `,
  ).run(
    videoId,
    activeMs,
    cardsMined,
    wordsSeen,
    linesSeen,
    tokensSeen,
    completed,
    startedAtMs,
    endedAtMs,
    nowMs,
    nowMs,
  );
}
/**
 * Inserts or accumulates the per-anime lifetime summary row for one session.
 *
 * Mirrors upsertLifetimeMedia, but also carries episode deltas: the caller
 * passes 0/1 flags for "this session started a new episode" and "this session
 * completed an episode for the first time", which are summed on conflict.
 * First/last watched timestamps are widened with NULL-aware CASE logic.
 */
function upsertLifetimeAnime(
  db: DatabaseSync,
  animeId: number,
  nowMs: number,
  activeMs: number,
  cardsMined: number,
  wordsSeen: number,
  linesSeen: number,
  tokensSeen: number,
  episodesStartedDelta: number,
  episodesCompletedDelta: number,
  startedAtMs: number,
  endedAtMs: number,
): void {
  db.prepare(
    `
    INSERT INTO imm_lifetime_anime(
      anime_id,
      total_sessions,
      total_active_ms,
      total_cards,
      total_words_seen,
      total_lines_seen,
      total_tokens_seen,
      episodes_started,
      episodes_completed,
      first_watched_ms,
      last_watched_ms,
      CREATED_DATE,
      LAST_UPDATE_DATE
    )
    VALUES (?, 1, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
    ON CONFLICT(anime_id) DO UPDATE SET
      total_sessions = total_sessions + 1,
      total_active_ms = total_active_ms + excluded.total_active_ms,
      total_cards = total_cards + excluded.total_cards,
      total_words_seen = total_words_seen + excluded.total_words_seen,
      total_lines_seen = total_lines_seen + excluded.total_lines_seen,
      total_tokens_seen = total_tokens_seen + excluded.total_tokens_seen,
      episodes_started = episodes_started + excluded.episodes_started,
      episodes_completed = episodes_completed + excluded.episodes_completed,
      first_watched_ms = CASE
        WHEN excluded.first_watched_ms IS NULL THEN first_watched_ms
        WHEN first_watched_ms IS NULL THEN excluded.first_watched_ms
        WHEN excluded.first_watched_ms < first_watched_ms THEN excluded.first_watched_ms
        ELSE first_watched_ms
      END,
      last_watched_ms = CASE
        WHEN excluded.last_watched_ms IS NULL THEN last_watched_ms
        WHEN last_watched_ms IS NULL THEN excluded.last_watched_ms
        WHEN excluded.last_watched_ms > last_watched_ms THEN excluded.last_watched_ms
        ELSE last_watched_ms
      END,
      LAST_UPDATE_DATE = excluded.LAST_UPDATE_DATE
    `,
  ).run(
    animeId,
    activeMs,
    cardsMined,
    wordsSeen,
    linesSeen,
    tokensSeen,
    episodesStartedDelta,
    episodesCompletedDelta,
    startedAtMs,
    endedAtMs,
    nowMs,
    nowMs,
  );
}
/**
 * Folds one ended session into the lifetime summary tables (global row,
 * per-video, per-anime).
 *
 * Idempotent: the session id is first claimed in
 * imm_lifetime_applied_sessions; when the claim conflicts (already applied)
 * the whole update is skipped. Counters prefer the latest telemetry sample
 * and fall back to the in-memory session state when no telemetry exists.
 */
export function applySessionLifetimeSummary(
  db: DatabaseSync,
  session: SessionState,
  endedAtMs: number,
): void {
  // Claim the session in the applied-ledger; DO NOTHING makes re-application a no-op.
  const applyResult = db
    .prepare(
      `
      INSERT INTO imm_lifetime_applied_sessions (
        session_id,
        applied_at_ms,
        CREATED_DATE,
        LAST_UPDATE_DATE
      ) VALUES (
        ?, ?, ?, ?
      )
      ON CONFLICT(session_id) DO NOTHING
      `,
    )
    .run(session.sessionId, endedAtMs, Date.now(), Date.now());
  if ((applyResult.changes ?? 0) <= 0) {
    // No row inserted -> this session was already summarized earlier.
    return;
  }
  // Most recent telemetry sample for the session, if any was recorded.
  const telemetry = db
    .prepare(
      `
      SELECT
        active_watched_ms,
        cards_mined,
        words_seen,
        lines_seen,
        tokens_seen
      FROM imm_session_telemetry
      WHERE session_id = ?
      ORDER BY sample_ms DESC, telemetry_id DESC
      LIMIT 1
      `,
    )
    .get(session.sessionId) as TelemetryRow | null;
  const video = db
    .prepare('SELECT anime_id, watched FROM imm_videos WHERE video_id = ?')
    .get(session.videoId) as VideoRow | null;
  // Pre-update lifetime rows: these drive the "first time" delta checks below,
  // so they must be read BEFORE the upserts mutate them.
  const mediaLifetime =
    (db
      .prepare('SELECT completed FROM imm_lifetime_media WHERE video_id = ?')
      .get(session.videoId) as LifetimeMediaStateRow | null | undefined) ?? null;
  const animeLifetime = video?.anime_id
    ? ((db
        .prepare('SELECT episodes_completed FROM imm_lifetime_anime WHERE anime_id = ?')
        .get(video.anime_id) as LifetimeAnimeStateRow | null | undefined) ?? null)
    : null;
  const anime = video?.anime_id
    ? ((db
        .prepare('SELECT episodes_total FROM imm_anime WHERE anime_id = ?')
        .get(video.anime_id) as AnimeRow | null | undefined) ?? null)
    : null;
  // Telemetry wins over in-memory counters; asPositiveNumber clamps to >= 0 ints.
  const activeMs = telemetry
    ? asPositiveNumber(telemetry.active_watched_ms, session.activeWatchedMs)
    : session.activeWatchedMs;
  const cardsMined = telemetry
    ? asPositiveNumber(telemetry.cards_mined, session.cardsMined)
    : session.cardsMined;
  const wordsSeen = telemetry
    ? asPositiveNumber(telemetry.words_seen, session.wordsSeen)
    : session.wordsSeen;
  const linesSeen = telemetry
    ? asPositiveNumber(telemetry.lines_seen, session.linesSeen)
    : session.linesSeen;
  const tokensSeen = telemetry
    ? asPositiveNumber(telemetry.tokens_seen, session.tokensSeen)
    : session.tokensSeen;
  const watched = video?.watched ?? 0;
  // Episode "started": no lifetime row yet AND no earlier retained session for this video.
  const isFirstSessionForVideoRun =
    mediaLifetime === null &&
    !hasRetainedPriorSession(db, session.videoId, session.startedAtMs, session.sessionId);
  // Episode "completed": video is watched and the lifetime row has not yet counted it.
  const isFirstCompletedSessionForVideoRun =
    watched > 0 && Number(mediaLifetime?.completed ?? 0) <= 0;
  const isFirstSessionForDay = isFirstSessionForLocalDay(
    db,
    session.sessionId,
    session.startedAtMs,
  );
  const episodesCompletedBefore = Number(animeLifetime?.episodes_completed ?? 0);
  const animeEpisodesTotal = anime?.episodes_total ?? null;
  // Anime counts as completed when this episode completion brings the tally
  // up to (or past) the known total episode count.
  const animeCompletedDelta =
    watched > 0 &&
    isFirstCompletedSessionForVideoRun &&
    animeEpisodesTotal !== null &&
    animeEpisodesTotal > 0 &&
    episodesCompletedBefore < animeEpisodesTotal &&
    episodesCompletedBefore + 1 >= animeEpisodesTotal
      ? 1
      : 0;
  const nowMs = Date.now();
  db.prepare(
    `
    UPDATE imm_lifetime_global
    SET
      total_sessions = total_sessions + 1,
      total_active_ms = total_active_ms + ?,
      total_cards = total_cards + ?,
      active_days = active_days + ?,
      episodes_started = episodes_started + ?,
      episodes_completed = episodes_completed + ?,
      anime_completed = anime_completed + ?,
      LAST_UPDATE_DATE = ?
    WHERE global_id = 1
    `,
  ).run(
    activeMs,
    cardsMined,
    isFirstSessionForDay ? 1 : 0,
    isFirstSessionForVideoRun ? 1 : 0,
    isFirstCompletedSessionForVideoRun ? 1 : 0,
    animeCompletedDelta,
    nowMs,
  );
  upsertLifetimeMedia(
    db,
    session.videoId,
    nowMs,
    activeMs,
    cardsMined,
    wordsSeen,
    linesSeen,
    tokensSeen,
    watched > 0 ? 1 : 0,
    session.startedAtMs,
    endedAtMs,
  );
  if (video?.anime_id) {
    upsertLifetimeAnime(
      db,
      video.anime_id,
      nowMs,
      activeMs,
      cardsMined,
      wordsSeen,
      linesSeen,
      tokensSeen,
      isFirstSessionForVideoRun ? 1 : 0,
      isFirstCompletedSessionForVideoRun ? 1 : 0,
      session.startedAtMs,
      endedAtMs,
    );
  }
}
/**
 * Drops all lifetime summary rows and re-derives them by replaying every
 * retained, ended session in chronological order inside one transaction.
 * On any failure the transaction is rolled back and the error rethrown.
 *
 * @returns How many sessions were replayed and when the rebuild ran.
 */
export function rebuildLifetimeSummaries(db: DatabaseSync): LifetimeRebuildSummary {
  const rebuiltAtMs = Date.now();
  // Only closed sessions participate; replay order matches original apply order.
  const endedSessions = db
    .prepare(
      `
      SELECT
        session_id AS sessionId,
        video_id AS videoId,
        started_at_ms AS startedAtMs,
        ended_at_ms AS endedAtMs,
        total_watched_ms AS totalWatchedMs,
        active_watched_ms AS activeWatchedMs,
        lines_seen AS linesSeen,
        words_seen AS wordsSeen,
        tokens_seen AS tokensSeen,
        cards_mined AS cardsMined,
        lookup_count AS lookupCount,
        lookup_hits AS lookupHits,
        pause_count AS pauseCount,
        pause_ms AS pauseMs,
        seek_forward_count AS seekForwardCount,
        seek_backward_count AS seekBackwardCount,
        media_buffer_events AS mediaBufferEvents
      FROM imm_sessions
      WHERE ended_at_ms IS NOT NULL
      ORDER BY started_at_ms ASC, session_id ASC
      `,
    )
    .all() as RetainedSessionRow[];
  db.exec('BEGIN');
  try {
    resetLifetimeSummaries(db, rebuiltAtMs);
    for (const row of endedSessions) {
      applySessionLifetimeSummary(db, toRebuildSessionState(row), row.endedAtMs);
    }
    db.exec('COMMIT');
  } catch (error) {
    db.exec('ROLLBACK');
    throw error;
  }
  return {
    appliedSessions: endedSessions.length,
    rebuiltAtMs,
  };
}
/**
 * Finalizes sessions that were left open (no ended_at_ms) by a crash or kill:
 * each is closed at its last telemetry sample time and folded into the
 * lifetime summaries, all within a single transaction.
 *
 * @returns The number of stale sessions that were reconciled.
 */
export function reconcileStaleActiveSessions(db: DatabaseSync): number {
  const staleSessions = getRetainedStaleActiveSessions(db);
  if (!staleSessions.length) {
    return 0;
  }
  db.exec('BEGIN');
  try {
    for (const row of staleSessions) {
      const state = toRebuildSessionState(row);
      finalizeSessionRecord(db, state, row.endedAtMs);
      applySessionLifetimeSummary(db, state, row.endedAtMs);
    }
    db.exec('COMMIT');
  } catch (error) {
    db.exec('ROLLBACK');
    throw error;
  }
  return staleSessions.length;
}
/**
 * Decides whether the lifetime summaries need an initial backfill: there are
 * retained ended sessions but either nothing has ever been applied to the
 * lifetime ledger, or the global session counter is still at zero.
 */
export function shouldBackfillLifetimeSummaries(db: DatabaseSync): boolean {
  const lifetimeRow = db
    .prepare('SELECT total_sessions AS totalSessions FROM imm_lifetime_global WHERE global_id = 1')
    .get() as { totalSessions: number } | null;
  const appliedRow = db
    .prepare('SELECT COUNT(*) AS count FROM imm_lifetime_applied_sessions')
    .get() as ExistenceRow | null;
  const endedRow = db
    .prepare('SELECT COUNT(*) AS count FROM imm_sessions WHERE ended_at_ms IS NOT NULL')
    .get() as ExistenceRow | null;
  const hasEndedSessions = Number(endedRow?.count ?? 0) > 0;
  const ledgerIsEmpty = Number(appliedRow?.count ?? 0) === 0;
  const globalIsEmpty = Number(lifetimeRow?.totalSessions ?? 0) === 0;
  return hasEndedSessions && (ledgerIsEmpty || globalIsEmpty);
}

View File

@@ -0,0 +1,181 @@
import test from 'node:test';
import assert from 'node:assert/strict';
import fs from 'node:fs';
import os from 'node:os';
import path from 'node:path';
import { Database } from './sqlite';
import { pruneRawRetention, pruneRollupRetention, toMonthKey } from './maintenance';
import { ensureSchema } from './storage';
/** Creates a fresh per-test temp directory and returns the tracker DB path inside it. */
function makeDbPath(): string {
  const prefix = path.join(os.tmpdir(), 'subminer-maintenance-test-');
  const tempRoot = fs.mkdtempSync(prefix);
  return path.join(tempRoot, 'tracker.db');
}
/** Best-effort removal of the temp directory that contains the given DB file. */
function cleanupDbPath(dbPath: string): void {
  const tempDir = path.dirname(dbPath);
  try {
    fs.rmSync(tempDir, { recursive: true, force: true });
  } catch {
    // Cleanup is best effort; a failure here must not fail the test.
  }
}
// Verifies that ended sessions are pruned by sessionsRetentionMs while
// telemetry rows are pruned by the (shorter) telemetryRetentionMs —
// i.e. the two raw-retention knobs act independently.
test('pruneRawRetention uses session retention separately from telemetry retention', () => {
  const dbPath = makeDbPath();
  const db = new Database(dbPath);
  try {
    ensureSchema(db);
    const nowMs = 90 * 86_400_000;
    // Session 1 ended past the 30-day session window; session 2 is recent.
    const staleEndedAtMs = nowMs - 40 * 86_400_000;
    const keptEndedAtMs = nowMs - 5 * 86_400_000;
    db.exec(`
      INSERT INTO imm_videos (
        video_id, video_key, canonical_title, source_type, duration_ms, CREATED_DATE, LAST_UPDATE_DATE
      ) VALUES (
        1, 'local:/tmp/video.mkv', 'Video', 1, 0, ${nowMs}, ${nowMs}
      );
      INSERT INTO imm_sessions (
        session_id, session_uuid, video_id, started_at_ms, ended_at_ms, status, CREATED_DATE, LAST_UPDATE_DATE
      ) VALUES
        (1, 'session-1', 1, ${staleEndedAtMs - 1_000}, ${staleEndedAtMs}, 2, ${staleEndedAtMs}, ${staleEndedAtMs}),
        (2, 'session-2', 1, ${keptEndedAtMs - 1_000}, ${keptEndedAtMs}, 2, ${keptEndedAtMs}, ${keptEndedAtMs});
      INSERT INTO imm_session_telemetry (
        session_id, sample_ms, total_watched_ms, active_watched_ms, CREATED_DATE, LAST_UPDATE_DATE
      ) VALUES
        (1, ${nowMs - 2 * 86_400_000}, 0, 0, ${nowMs}, ${nowMs}),
        (2, ${nowMs - 12 * 60 * 60 * 1000}, 0, 0, ${nowMs}, ${nowMs});
    `);
    // Telemetry window is 1 day: sample 1 (2 days old) goes, sample 2 stays.
    const result = pruneRawRetention(db, nowMs, {
      eventsRetentionMs: 7 * 86_400_000,
      telemetryRetentionMs: 1 * 86_400_000,
      sessionsRetentionMs: 30 * 86_400_000,
    });
    const remainingSessions = db
      .prepare('SELECT session_id FROM imm_sessions ORDER BY session_id')
      .all() as Array<{ session_id: number }>;
    const remainingTelemetry = db
      .prepare('SELECT session_id FROM imm_session_telemetry ORDER BY session_id')
      .all() as Array<{ session_id: number }>;
    assert.equal(result.deletedTelemetryRows, 1);
    assert.equal(result.deletedEndedSessions, 1);
    assert.deepEqual(
      remainingSessions.map((row) => row.session_id),
      [2],
    );
    assert.deepEqual(
      remainingTelemetry.map((row) => row.session_id),
      [2],
    );
  } finally {
    db.close();
    cleanupDbPath(dbPath);
  }
});
// Verifies the raw/rollup split: pruneRawRetention must leave rollup tables
// untouched, and pruneRollupRetention alone removes aged daily/monthly rows.
test('raw retention keeps rollups and rollup retention prunes them separately', () => {
  const dbPath = makeDbPath();
  const db = new Database(dbPath);
  try {
    ensureSchema(db);
    const nowMs = Date.UTC(2026, 2, 16, 12, 0, 0, 0);
    // Both rollups sit well past their retention windows (30d daily, 365d monthly).
    const oldDay = Math.floor((nowMs - 90 * 86_400_000) / 86_400_000);
    const oldMonth = toMonthKey(nowMs - 400 * 86_400_000);
    db.exec(`
      INSERT INTO imm_videos (
        video_id, video_key, canonical_title, source_type, duration_ms, CREATED_DATE, LAST_UPDATE_DATE
      ) VALUES (
        1, 'local:/tmp/video.mkv', 'Video', 1, 0, ${nowMs}, ${nowMs}
      );
      INSERT INTO imm_sessions (
        session_id, session_uuid, video_id, started_at_ms, ended_at_ms, status, CREATED_DATE, LAST_UPDATE_DATE
      ) VALUES (
        1, 'session-1', 1, ${nowMs - 90 * 86_400_000}, ${nowMs - 90 * 86_400_000 + 1_000}, 2, ${nowMs}, ${nowMs}
      );
      INSERT INTO imm_session_telemetry (
        session_id, sample_ms, total_watched_ms, active_watched_ms, CREATED_DATE, LAST_UPDATE_DATE
      ) VALUES (
        1, ${nowMs - 90 * 86_400_000}, 0, 0, ${nowMs}, ${nowMs}
      );
      INSERT INTO imm_daily_rollups (
        rollup_day, video_id, total_sessions, total_active_min, total_lines_seen, total_words_seen,
        total_tokens_seen, total_cards
      ) VALUES (
        ${oldDay}, 1, 1, 10, 1, 1, 1, 1
      );
      INSERT INTO imm_monthly_rollups (
        rollup_month, video_id, total_sessions, total_active_min, total_lines_seen, total_words_seen,
        total_tokens_seen, total_cards, CREATED_DATE, LAST_UPDATE_DATE
      ) VALUES (
        ${oldMonth}, 1, 1, 10, 1, 1, 1, 1, ${nowMs}, ${nowMs}
      );
    `);
    pruneRawRetention(db, nowMs, {
      eventsRetentionMs: 7 * 86_400_000,
      telemetryRetentionMs: 30 * 86_400_000,
      sessionsRetentionMs: 30 * 86_400_000,
    });
    // Raw pruning must not touch either rollup table.
    const rollupsAfterRawPrune = db
      .prepare('SELECT COUNT(*) AS total FROM imm_daily_rollups')
      .get() as { total: number } | null;
    const monthlyAfterRawPrune = db
      .prepare('SELECT COUNT(*) AS total FROM imm_monthly_rollups')
      .get() as { total: number } | null;
    assert.equal(rollupsAfterRawPrune?.total, 1);
    assert.equal(monthlyAfterRawPrune?.total, 1);
    const rollupPrune = pruneRollupRetention(db, nowMs, {
      dailyRollupRetentionMs: 30 * 86_400_000,
      monthlyRollupRetentionMs: 365 * 86_400_000,
    });
    const rollupsAfterRollupPrune = db
      .prepare('SELECT COUNT(*) AS total FROM imm_daily_rollups')
      .get() as { total: number } | null;
    const monthlyAfterRollupPrune = db
      .prepare('SELECT COUNT(*) AS total FROM imm_monthly_rollups')
      .get() as { total: number } | null;
    assert.equal(rollupPrune.deletedDailyRows, 1);
    assert.equal(rollupPrune.deletedMonthlyRows, 1);
    assert.equal(rollupsAfterRollupPrune?.total, 0);
    assert.equal(monthlyAfterRollupPrune?.total, 0);
  } finally {
    db.close();
    cleanupDbPath(dbPath);
  }
});
// Guards the schema migration: rollup scans filter telemetry by sample_ms,
// so ensureSchema must create a single-column index on that field.
test('ensureSchema adds sample_ms index for telemetry rollup scans', () => {
  const dbPath = makeDbPath();
  const db = new Database(dbPath);
  try {
    ensureSchema(db);
    const indexes = db
      .prepare("PRAGMA index_list('imm_session_telemetry')")
      .all() as Array<{ name: string }>;
    const hasSampleMsIndex = indexes.some((row) => row.name === 'idx_telemetry_sample_ms');
    assert.equal(hasSampleMsIndex, true);
    // The index must cover exactly the sample_ms column, nothing else.
    const indexColumns = db
      .prepare("PRAGMA index_info('idx_telemetry_sample_ms')")
      .all() as Array<{ name: string }>;
    assert.deepEqual(
      indexColumns.map((column) => column.name),
      ['sample_ms'],
    );
  } finally {
    db.close();
    cleanupDbPath(dbPath);
  }
});

View File

@@ -18,11 +18,9 @@ interface RollupTelemetryResult {
maxSampleMs: number | null;
}
interface RetentionResult {
interface RawRetentionResult {
deletedSessionEvents: number;
deletedTelemetryRows: number;
deletedDailyRows: number;
deletedMonthlyRows: number;
deletedEndedSessions: number;
}
@@ -31,20 +29,18 @@ export function toMonthKey(timestampMs: number): number {
return monthDate.getUTCFullYear() * 100 + monthDate.getUTCMonth() + 1;
}
export function pruneRetention(
export function pruneRawRetention(
db: DatabaseSync,
nowMs: number,
policy: {
eventsRetentionMs: number;
telemetryRetentionMs: number;
dailyRollupRetentionMs: number;
monthlyRollupRetentionMs: number;
sessionsRetentionMs: number;
},
): RetentionResult {
): RawRetentionResult {
const eventCutoff = nowMs - policy.eventsRetentionMs;
const telemetryCutoff = nowMs - policy.telemetryRetentionMs;
const dayCutoff = nowMs - policy.dailyRollupRetentionMs;
const monthCutoff = nowMs - policy.monthlyRollupRetentionMs;
const sessionsCutoff = nowMs - policy.sessionsRetentionMs;
const deletedSessionEvents = (
db.prepare(`DELETE FROM imm_session_events WHERE ts_ms < ?`).run(eventCutoff) as {
@@ -56,28 +52,49 @@ export function pruneRetention(
changes: number;
}
).changes;
const deletedDailyRows = (
db
.prepare(`DELETE FROM imm_daily_rollups WHERE rollup_day < ?`)
.run(Math.floor(dayCutoff / DAILY_MS)) as { changes: number }
).changes;
const deletedMonthlyRows = (
db
.prepare(`DELETE FROM imm_monthly_rollups WHERE rollup_month < ?`)
.run(toMonthKey(monthCutoff)) as { changes: number }
).changes;
const deletedEndedSessions = (
db
.prepare(`DELETE FROM imm_sessions WHERE ended_at_ms IS NOT NULL AND ended_at_ms < ?`)
.run(telemetryCutoff) as { changes: number }
.run(sessionsCutoff) as { changes: number }
).changes;
return {
deletedSessionEvents,
deletedTelemetryRows,
deletedEndedSessions,
};
}
/**
 * Prunes aged rollup rows independently of raw-data retention.
 * Daily rollups older than `dailyRollupRetentionMs` and monthly rollups older
 * than `monthlyRollupRetentionMs` are deleted; a non-finite retention value
 * (e.g. Infinity for "keep forever") disables pruning for that tier.
 *
 * Fix: the return object previously included `deletedEndedSessions`, an
 * identifier that is not defined in this function (it belongs to the raw
 * prune path) and is not part of the declared return type.
 *
 * @returns Row counts deleted from the daily and monthly rollup tables.
 */
export function pruneRollupRetention(
  db: DatabaseSync,
  nowMs: number,
  policy: {
    dailyRollupRetentionMs: number;
    monthlyRollupRetentionMs: number;
  },
): { deletedDailyRows: number; deletedMonthlyRows: number } {
  const deletedDailyRows = Number.isFinite(policy.dailyRollupRetentionMs)
    ? (
        db
          .prepare(`DELETE FROM imm_daily_rollups WHERE rollup_day < ?`)
          // rollup_day is stored as a day number, so convert the cutoff too.
          .run(Math.floor((nowMs - policy.dailyRollupRetentionMs) / DAILY_MS)) as {
          changes: number;
        }
      ).changes
    : 0;
  const deletedMonthlyRows = Number.isFinite(policy.monthlyRollupRetentionMs)
    ? (
        db
          .prepare(`DELETE FROM imm_monthly_rollups WHERE rollup_month < ?`)
          // rollup_month is a YYYYMM key; toMonthKey builds the comparable cutoff.
          .run(toMonthKey(nowMs - policy.monthlyRollupRetentionMs)) as {
          changes: number;
        }
      ).changes
    : 0;
  return {
    deletedDailyRows,
    deletedMonthlyRows,
  };
}

View File

@@ -1,3 +1,4 @@
import { createHash } from 'node:crypto';
import type { DatabaseSync } from './sqlite';
import type {
AnimeAnilistEntryRow,
@@ -29,6 +30,7 @@ import type {
WordOccurrenceRow,
VocabularyStatsRow,
} from './types';
import { buildCoverBlobReference, normalizeCoverBlobBytes } from './storage';
import { PartOfSpeech, type MergedToken } from '../../../types';
import { shouldExcludeTokenFromVocabularyPersistence } from '../tokenizer/annotation-stage';
import { deriveStoredPartOfSpeech } from '../tokenizer/part-of-speech';
@@ -68,8 +70,94 @@ type CleanupVocabularyStatsOptions = {
} | null>;
};
const ACTIVE_SESSION_METRICS_CTE = `
WITH active_session_metrics AS (
SELECT
t.session_id AS sessionId,
MAX(t.total_watched_ms) AS totalWatchedMs,
MAX(t.active_watched_ms) AS activeWatchedMs,
MAX(t.lines_seen) AS linesSeen,
MAX(t.words_seen) AS wordsSeen,
MAX(t.tokens_seen) AS tokensSeen,
MAX(t.cards_mined) AS cardsMined,
MAX(t.lookup_count) AS lookupCount,
MAX(t.lookup_hits) AS lookupHits
FROM imm_session_telemetry t
JOIN imm_sessions s ON s.session_id = t.session_id
WHERE s.ended_at_ms IS NULL
GROUP BY t.session_id
)
`;
/**
 * Builds the SQL expression that resolves a cover image blob: prefer the
 * shared blob-store row, and only fall back to the inline blob on the media
 * row when that row has no blob hash (i.e. it was never migrated).
 */
function resolvedCoverBlobExpr(mediaAlias: string, blobStoreAlias: string): string {
  const inlineFallback = `CASE WHEN ${mediaAlias}.cover_blob_hash IS NULL THEN ${mediaAlias}.cover_blob ELSE NULL END`;
  return `COALESCE(${blobStoreAlias}.cover_blob, ${inlineFallback})`;
}
/**
 * Deletes a cover-art blob row when no media-art record references its hash
 * anymore. A null/empty hash is a no-op.
 */
function cleanupUnusedCoverArtBlobHash(db: DatabaseSync, blobHash: string | null): void {
  if (!blobHash) {
    return;
  }
  const deleteOrphan = db.prepare(
    `
      DELETE FROM imm_cover_art_blobs
      WHERE blob_hash = ?
        AND NOT EXISTS (
          SELECT 1
          FROM imm_media_art
          WHERE cover_blob_hash = ?
        )
    `,
  );
  deleteOrphan.run(blobHash, blobHash);
}
/**
 * Looks for an existing cover blob hash another video already stores, so the
 * blob can be shared instead of re-downloaded. Matching by AniList id takes
 * priority; matching by cover URL is the fallback. Returns null when neither
 * key yields a hash.
 */
function findSharedCoverBlobHash(
  db: DatabaseSync,
  videoId: number,
  anilistId: number | null,
  coverUrl: string | null,
): string | null {
  if (anilistId !== null) {
    const anilistMatch = db
      .prepare(
        `
        SELECT cover_blob_hash AS coverBlobHash
        FROM imm_media_art
        WHERE video_id != ?
          AND anilist_id = ?
          AND cover_blob_hash IS NOT NULL
        ORDER BY fetched_at_ms DESC, video_id DESC
        LIMIT 1
        `,
      )
      .get(videoId, anilistId) as { coverBlobHash: string | null } | undefined;
    const sharedHash = anilistMatch?.coverBlobHash;
    if (sharedHash) {
      return sharedHash;
    }
  }
  if (!coverUrl) {
    return null;
  }
  const urlMatch = db
    .prepare(
      `
      SELECT cover_blob_hash AS coverBlobHash
      FROM imm_media_art
      WHERE video_id != ?
        AND cover_url = ?
        AND cover_blob_hash IS NOT NULL
      ORDER BY fetched_at_ms DESC, video_id DESC
      LIMIT 1
      `,
    )
    .get(videoId, coverUrl) as { coverBlobHash: string | null } | undefined;
  return urlMatch?.coverBlobHash ?? null;
}
export function getSessionSummaries(db: DatabaseSync, limit = 50): SessionSummaryQueryRow[] {
const prepared = db.prepare(`
${ACTIVE_SESSION_METRICS_CTE}
SELECT
s.session_id AS sessionId,
s.video_id AS videoId,
@@ -78,19 +166,18 @@ export function getSessionSummaries(db: DatabaseSync, limit = 50): SessionSummar
a.canonical_title AS animeTitle,
s.started_at_ms AS startedAtMs,
s.ended_at_ms AS endedAtMs,
COALESCE(MAX(t.total_watched_ms), 0) AS totalWatchedMs,
COALESCE(MAX(t.active_watched_ms), 0) AS activeWatchedMs,
COALESCE(MAX(t.lines_seen), 0) AS linesSeen,
COALESCE(MAX(t.words_seen), 0) AS wordsSeen,
COALESCE(MAX(t.tokens_seen), 0) AS tokensSeen,
COALESCE(MAX(t.cards_mined), 0) AS cardsMined,
COALESCE(MAX(t.lookup_count), 0) AS lookupCount,
COALESCE(MAX(t.lookup_hits), 0) AS lookupHits
COALESCE(asm.totalWatchedMs, s.total_watched_ms, 0) AS totalWatchedMs,
COALESCE(asm.activeWatchedMs, s.active_watched_ms, 0) AS activeWatchedMs,
COALESCE(asm.linesSeen, s.lines_seen, 0) AS linesSeen,
COALESCE(asm.wordsSeen, s.words_seen, 0) AS wordsSeen,
COALESCE(asm.tokensSeen, s.tokens_seen, 0) AS tokensSeen,
COALESCE(asm.cardsMined, s.cards_mined, 0) AS cardsMined,
COALESCE(asm.lookupCount, s.lookup_count, 0) AS lookupCount,
COALESCE(asm.lookupHits, s.lookup_hits, 0) AS lookupHits
FROM imm_sessions s
LEFT JOIN imm_session_telemetry t ON t.session_id = s.session_id
LEFT JOIN active_session_metrics asm ON asm.sessionId = s.session_id
LEFT JOIN imm_videos v ON v.video_id = s.video_id
LEFT JOIN imm_anime a ON a.anime_id = v.anime_id
GROUP BY s.session_id
ORDER BY s.started_at_ms DESC
LIMIT ?
`);
@@ -126,11 +213,34 @@ export function getQueryHints(db: DatabaseSync): {
activeAnimeCount: number;
totalEpisodesWatched: number;
totalAnimeCompleted: number;
totalActiveMin: number;
totalCards: number;
activeDays: number;
} {
const sessions = db.prepare('SELECT COUNT(*) AS total FROM imm_sessions');
const active = db.prepare('SELECT COUNT(*) AS total FROM imm_sessions WHERE ended_at_ms IS NULL');
const totalSessions = Number((sessions.get() as { total?: number } | null)?.total ?? 0);
const activeSessions = Number((active.get() as { total?: number } | null)?.total ?? 0);
const lifetime = db
.prepare(
`
SELECT
total_sessions AS totalSessions,
total_active_ms AS totalActiveMs,
total_cards AS totalCards,
active_days AS activeDays,
episodes_completed AS episodesCompleted,
anime_completed AS animeCompleted
FROM imm_lifetime_global
WHERE global_id = 1
`,
)
.get() as {
totalSessions: number;
totalActiveMs: number;
totalCards: number;
activeDays: number;
episodesCompleted: number;
animeCompleted: number;
} | null;
const now = new Date();
const todayLocal = Math.floor(
@@ -165,35 +275,14 @@ export function getQueryHints(db: DatabaseSync): {
.get(thirtyDaysAgoMs) as { count: number }
)?.count ?? 0;
const totalEpisodesWatched =
(
db
.prepare(
`
SELECT COUNT(*) AS count FROM imm_videos WHERE watched = 1
`,
)
.get() as { count: number }
)?.count ?? 0;
const totalEpisodesWatched = Number(lifetime?.episodesCompleted ?? 0);
const totalAnimeCompleted =
(
db
.prepare(
`
SELECT COUNT(*) AS count FROM (
SELECT a.anime_id
FROM imm_anime a
JOIN imm_videos v ON v.anime_id = a.anime_id
JOIN imm_media_art m ON m.video_id = v.video_id
WHERE m.episodes_total IS NOT NULL AND m.episodes_total > 0
GROUP BY a.anime_id
HAVING COUNT(DISTINCT CASE WHEN v.watched = 1 THEN v.video_id END) >= MAX(m.episodes_total)
)
`,
)
.get() as { count: number }
)?.count ?? 0;
const totalAnimeCompleted = Number(lifetime?.animeCompleted ?? 0);
const totalSessions = Number(lifetime?.totalSessions ?? 0);
const totalActiveMin = Math.floor(Math.max(0, lifetime?.totalActiveMs ?? 0) / 60000);
const totalCards = Number(lifetime?.totalCards ?? 0);
const activeDays = Number(lifetime?.activeDays ?? 0);
return {
totalSessions,
@@ -202,32 +291,48 @@ export function getQueryHints(db: DatabaseSync): {
activeAnimeCount,
totalEpisodesWatched,
totalAnimeCompleted,
totalActiveMin,
totalCards,
activeDays,
};
}
export function getDailyRollups(db: DatabaseSync, limit = 60): ImmersionSessionRollupRow[] {
const prepared = db.prepare(`
WITH recent_days AS (
SELECT DISTINCT rollup_day
FROM imm_daily_rollups
ORDER BY rollup_day DESC
LIMIT ?
)
SELECT
rollup_day AS rollupDayOrMonth,
video_id AS videoId,
total_sessions AS totalSessions,
total_active_min AS totalActiveMin,
total_lines_seen AS totalLinesSeen,
total_words_seen AS totalWordsSeen,
total_tokens_seen AS totalTokensSeen,
total_cards AS totalCards,
cards_per_hour AS cardsPerHour,
words_per_min AS wordsPerMin,
lookup_hit_rate AS lookupHitRate
FROM imm_daily_rollups
ORDER BY rollup_day DESC, video_id DESC
LIMIT ?
`);
r.rollup_day AS rollupDayOrMonth,
r.video_id AS videoId,
r.total_sessions AS totalSessions,
r.total_active_min AS totalActiveMin,
r.total_lines_seen AS totalLinesSeen,
r.total_words_seen AS totalWordsSeen,
r.total_tokens_seen AS totalTokensSeen,
r.total_cards AS totalCards,
r.cards_per_hour AS cardsPerHour,
r.words_per_min AS wordsPerMin,
r.lookup_hit_rate AS lookupHitRate
FROM imm_daily_rollups r
WHERE r.rollup_day IN (SELECT rollup_day FROM recent_days)
ORDER BY r.rollup_day DESC, r.video_id DESC
`);
return prepared.all(limit) as unknown as ImmersionSessionRollupRow[];
}
export function getMonthlyRollups(db: DatabaseSync, limit = 24): ImmersionSessionRollupRow[] {
const prepared = db.prepare(`
WITH recent_months AS (
SELECT DISTINCT rollup_month
FROM imm_monthly_rollups
ORDER BY rollup_month DESC
LIMIT ?
)
SELECT
rollup_month AS rollupDayOrMonth,
video_id AS videoId,
@@ -241,8 +346,8 @@ export function getMonthlyRollups(db: DatabaseSync, limit = 24): ImmersionSessio
0 AS wordsPerMin,
0 AS lookupHitRate
FROM imm_monthly_rollups
WHERE rollup_month IN (SELECT rollup_month FROM recent_months)
ORDER BY rollup_month DESC, video_id DESC
LIMIT ?
`);
return prepared.all(limit) as unknown as ImmersionSessionRollupRow[];
}
@@ -652,27 +757,18 @@ export function getAnimeLibrary(db: DatabaseSync): AnimeLibraryRow[] {
a.anime_id AS animeId,
a.canonical_title AS canonicalTitle,
a.anilist_id AS anilistId,
COUNT(DISTINCT s.session_id) AS totalSessions,
COALESCE(SUM(sm.max_active_ms), 0) AS totalActiveMs,
COALESCE(SUM(sm.max_cards), 0) AS totalCards,
COALESCE(SUM(sm.max_words), 0) AS totalWordsSeen,
COALESCE(lm.total_sessions, 0) AS totalSessions,
COALESCE(lm.total_active_ms, 0) AS totalActiveMs,
COALESCE(lm.total_cards, 0) AS totalCards,
COALESCE(lm.total_words_seen, 0) AS totalWordsSeen,
COUNT(DISTINCT v.video_id) AS episodeCount,
a.episodes_total AS episodesTotal,
MAX(s.started_at_ms) AS lastWatchedMs
COALESCE(lm.last_watched_ms, 0) AS lastWatchedMs
FROM imm_anime a
JOIN imm_lifetime_anime lm ON lm.anime_id = a.anime_id
JOIN imm_videos v ON v.anime_id = a.anime_id
JOIN imm_sessions s ON s.video_id = v.video_id
LEFT JOIN (
SELECT
t.session_id,
MAX(t.active_watched_ms) AS max_active_ms,
MAX(t.cards_mined) AS max_cards,
MAX(t.words_seen) AS max_words
FROM imm_session_telemetry t
GROUP BY t.session_id
) sm ON sm.session_id = s.session_id
GROUP BY a.anime_id
ORDER BY totalActiveMs DESC, lastWatchedMs DESC, canonicalTitle ASC
ORDER BY totalActiveMs DESC, lm.last_watched_ms DESC, canonicalTitle ASC
`,
)
.all() as unknown as AnimeLibraryRow[];
@@ -682,6 +778,7 @@ export function getAnimeDetail(db: DatabaseSync, animeId: number): AnimeDetailRo
return db
.prepare(
`
${ACTIVE_SESSION_METRICS_CTE}
SELECT
a.anime_id AS animeId,
a.canonical_title AS canonicalTitle,
@@ -690,30 +787,20 @@ export function getAnimeDetail(db: DatabaseSync, animeId: number): AnimeDetailRo
a.title_english AS titleEnglish,
a.title_native AS titleNative,
a.description AS description,
COUNT(DISTINCT s.session_id) AS totalSessions,
COALESCE(SUM(sm.max_active_ms), 0) AS totalActiveMs,
COALESCE(SUM(sm.max_cards), 0) AS totalCards,
COALESCE(SUM(sm.max_words), 0) AS totalWordsSeen,
COALESCE(SUM(sm.max_lines), 0) AS totalLinesSeen,
COALESCE(SUM(sm.max_lookups), 0) AS totalLookupCount,
COALESCE(SUM(sm.max_hits), 0) AS totalLookupHits,
COALESCE(lm.total_sessions, 0) AS totalSessions,
COALESCE(lm.total_active_ms, 0) AS totalActiveMs,
COALESCE(lm.total_cards, 0) AS totalCards,
COALESCE(lm.total_words_seen, 0) AS totalWordsSeen,
COALESCE(lm.total_lines_seen, 0) AS totalLinesSeen,
COALESCE(SUM(COALESCE(asm.lookupCount, s.lookup_count, 0)), 0) AS totalLookupCount,
COALESCE(SUM(COALESCE(asm.lookupHits, s.lookup_hits, 0)), 0) AS totalLookupHits,
COUNT(DISTINCT v.video_id) AS episodeCount,
MAX(s.started_at_ms) AS lastWatchedMs
COALESCE(lm.last_watched_ms, 0) AS lastWatchedMs
FROM imm_anime a
JOIN imm_lifetime_anime lm ON lm.anime_id = a.anime_id
JOIN imm_videos v ON v.anime_id = a.anime_id
JOIN imm_sessions s ON s.video_id = v.video_id
LEFT JOIN (
SELECT
t.session_id,
MAX(t.active_watched_ms) AS max_active_ms,
MAX(t.cards_mined) AS max_cards,
MAX(t.words_seen) AS max_words,
MAX(t.lines_seen) AS max_lines,
MAX(t.lookup_count) AS max_lookups,
MAX(t.lookup_hits) AS max_hits
FROM imm_session_telemetry t
GROUP BY t.session_id
) sm ON sm.session_id = s.session_id
LEFT JOIN imm_sessions s ON s.video_id = v.video_id
LEFT JOIN active_session_metrics asm ON asm.sessionId = s.session_id
WHERE a.anime_id = ?
GROUP BY a.anime_id
`,
@@ -744,6 +831,7 @@ export function getAnimeEpisodes(db: DatabaseSync, animeId: number): AnimeEpisod
return db
.prepare(
`
${ACTIVE_SESSION_METRICS_CTE}
SELECT
v.anime_id AS animeId,
v.video_id AS videoId,
@@ -754,21 +842,13 @@ export function getAnimeEpisodes(db: DatabaseSync, animeId: number): AnimeEpisod
v.duration_ms AS durationMs,
v.watched AS watched,
COUNT(DISTINCT s.session_id) AS totalSessions,
COALESCE(SUM(sm.max_active_ms), 0) AS totalActiveMs,
COALESCE(SUM(sm.max_cards), 0) AS totalCards,
COALESCE(SUM(sm.max_words), 0) AS totalWordsSeen,
COALESCE(SUM(COALESCE(asm.activeWatchedMs, s.active_watched_ms, 0)), 0) AS totalActiveMs,
COALESCE(SUM(COALESCE(asm.cardsMined, s.cards_mined, 0)), 0) AS totalCards,
COALESCE(SUM(COALESCE(asm.wordsSeen, s.words_seen, 0)), 0) AS totalWordsSeen,
MAX(s.started_at_ms) AS lastWatchedMs
FROM imm_videos v
JOIN imm_sessions s ON s.video_id = v.video_id
LEFT JOIN (
SELECT
t.session_id,
MAX(t.active_watched_ms) AS max_active_ms,
MAX(t.cards_mined) AS max_cards,
MAX(t.words_seen) AS max_words
FROM imm_session_telemetry t
GROUP BY t.session_id
) sm ON sm.session_id = s.session_id
LEFT JOIN active_session_metrics asm ON asm.sessionId = s.session_id
WHERE v.anime_id = ?
GROUP BY v.video_id
ORDER BY
@@ -789,26 +869,19 @@ export function getMediaLibrary(db: DatabaseSync): MediaLibraryRow[] {
SELECT
v.video_id AS videoId,
v.canonical_title AS canonicalTitle,
COUNT(DISTINCT s.session_id) AS totalSessions,
COALESCE(SUM(sm.max_active_ms), 0) AS totalActiveMs,
COALESCE(SUM(sm.max_cards), 0) AS totalCards,
COALESCE(SUM(sm.max_words), 0) AS totalWordsSeen,
MAX(s.started_at_ms) AS lastWatchedMs,
CASE WHEN ma.cover_blob IS NOT NULL THEN 1 ELSE 0 END AS hasCoverArt
COALESCE(lm.total_sessions, 0) AS totalSessions,
COALESCE(lm.total_active_ms, 0) AS totalActiveMs,
COALESCE(lm.total_cards, 0) AS totalCards,
COALESCE(lm.total_words_seen, 0) AS totalWordsSeen,
COALESCE(lm.last_watched_ms, 0) AS lastWatchedMs,
CASE
WHEN ma.cover_blob_hash IS NOT NULL OR ma.cover_blob IS NOT NULL THEN 1
ELSE 0
END AS hasCoverArt
FROM imm_videos v
JOIN imm_sessions s ON s.video_id = v.video_id
LEFT JOIN (
SELECT
t.session_id,
MAX(t.active_watched_ms) AS max_active_ms,
MAX(t.cards_mined) AS max_cards,
MAX(t.words_seen) AS max_words
FROM imm_session_telemetry t
GROUP BY t.session_id
) sm ON sm.session_id = s.session_id
JOIN imm_lifetime_media lm ON lm.video_id = v.video_id
LEFT JOIN imm_media_art ma ON ma.video_id = v.video_id
GROUP BY v.video_id
ORDER BY lastWatchedMs DESC
ORDER BY lm.last_watched_ms DESC
`,
)
.all() as unknown as MediaLibraryRow[];
@@ -818,30 +891,21 @@ export function getMediaDetail(db: DatabaseSync, videoId: number): MediaDetailRo
return db
.prepare(
`
${ACTIVE_SESSION_METRICS_CTE}
SELECT
v.video_id AS videoId,
v.canonical_title AS canonicalTitle,
COUNT(DISTINCT s.session_id) AS totalSessions,
COALESCE(SUM(sm.max_active_ms), 0) AS totalActiveMs,
COALESCE(SUM(sm.max_cards), 0) AS totalCards,
COALESCE(SUM(sm.max_words), 0) AS totalWordsSeen,
COALESCE(SUM(sm.max_lines), 0) AS totalLinesSeen,
COALESCE(SUM(sm.max_lookups), 0) AS totalLookupCount,
COALESCE(SUM(sm.max_hits), 0) AS totalLookupHits
COALESCE(lm.total_sessions, 0) AS totalSessions,
COALESCE(lm.total_active_ms, 0) AS totalActiveMs,
COALESCE(lm.total_cards, 0) AS totalCards,
COALESCE(lm.total_words_seen, 0) AS totalWordsSeen,
COALESCE(lm.total_lines_seen, 0) AS totalLinesSeen,
COALESCE(SUM(COALESCE(asm.lookupCount, s.lookup_count, 0)), 0) AS totalLookupCount,
COALESCE(SUM(COALESCE(asm.lookupHits, s.lookup_hits, 0)), 0) AS totalLookupHits
FROM imm_videos v
JOIN imm_sessions s ON s.video_id = v.video_id
LEFT JOIN (
SELECT
t.session_id,
MAX(t.active_watched_ms) AS max_active_ms,
MAX(t.cards_mined) AS max_cards,
MAX(t.words_seen) AS max_words,
MAX(t.lines_seen) AS max_lines,
MAX(t.lookup_count) AS max_lookups,
MAX(t.lookup_hits) AS max_hits
FROM imm_session_telemetry t
GROUP BY t.session_id
) sm ON sm.session_id = s.session_id
JOIN imm_lifetime_media lm ON lm.video_id = v.video_id
LEFT JOIN imm_sessions s ON s.video_id = v.video_id
LEFT JOIN active_session_metrics asm ON asm.sessionId = s.session_id
WHERE v.video_id = ?
GROUP BY v.video_id
`,
@@ -857,25 +921,25 @@ export function getMediaSessions(
return db
.prepare(
`
${ACTIVE_SESSION_METRICS_CTE}
SELECT
s.session_id AS sessionId,
s.video_id AS videoId,
v.canonical_title AS canonicalTitle,
s.started_at_ms AS startedAtMs,
s.ended_at_ms AS endedAtMs,
COALESCE(MAX(t.total_watched_ms), 0) AS totalWatchedMs,
COALESCE(MAX(t.active_watched_ms), 0) AS activeWatchedMs,
COALESCE(MAX(t.lines_seen), 0) AS linesSeen,
COALESCE(MAX(t.words_seen), 0) AS wordsSeen,
COALESCE(MAX(t.tokens_seen), 0) AS tokensSeen,
COALESCE(MAX(t.cards_mined), 0) AS cardsMined,
COALESCE(MAX(t.lookup_count), 0) AS lookupCount,
COALESCE(MAX(t.lookup_hits), 0) AS lookupHits
COALESCE(asm.totalWatchedMs, s.total_watched_ms, 0) AS totalWatchedMs,
COALESCE(asm.activeWatchedMs, s.active_watched_ms, 0) AS activeWatchedMs,
COALESCE(asm.linesSeen, s.lines_seen, 0) AS linesSeen,
COALESCE(asm.wordsSeen, s.words_seen, 0) AS wordsSeen,
COALESCE(asm.tokensSeen, s.tokens_seen, 0) AS tokensSeen,
COALESCE(asm.cardsMined, s.cards_mined, 0) AS cardsMined,
COALESCE(asm.lookupCount, s.lookup_count, 0) AS lookupCount,
COALESCE(asm.lookupHits, s.lookup_hits, 0) AS lookupHits
FROM imm_sessions s
LEFT JOIN imm_session_telemetry t ON t.session_id = s.session_id
LEFT JOIN active_session_metrics asm ON asm.sessionId = s.session_id
LEFT JOIN imm_videos v ON v.video_id = s.video_id
WHERE s.video_id = ?
GROUP BY s.session_id
ORDER BY s.started_at_ms DESC
LIMIT ?
`,
@@ -891,6 +955,13 @@ export function getMediaDailyRollups(
return db
.prepare(
`
WITH recent_days AS (
SELECT DISTINCT rollup_day
FROM imm_daily_rollups
WHERE video_id = ?
ORDER BY rollup_day DESC
LIMIT ?
)
SELECT
rollup_day AS rollupDayOrMonth,
video_id AS videoId,
@@ -905,14 +976,47 @@ export function getMediaDailyRollups(
lookup_hit_rate AS lookupHitRate
FROM imm_daily_rollups
WHERE video_id = ?
ORDER BY rollup_day DESC
LIMIT ?
`,
AND rollup_day IN (SELECT rollup_day FROM recent_days)
ORDER BY rollup_day DESC, video_id DESC
`,
)
.all(videoId, limit) as unknown as ImmersionSessionRollupRow[];
.all(videoId, limit, videoId) as unknown as ImmersionSessionRollupRow[];
}
export function getAnimeDailyRollups(
db: DatabaseSync,
animeId: number,
limit = 90,
): ImmersionSessionRollupRow[] {
return db
.prepare(
`
WITH recent_days AS (
SELECT DISTINCT r.rollup_day
FROM imm_daily_rollups r
JOIN imm_videos v ON v.video_id = r.video_id
WHERE v.anime_id = ?
ORDER BY r.rollup_day DESC
LIMIT ?
)
SELECT r.rollup_day AS rollupDayOrMonth, r.video_id AS videoId,
r.total_sessions AS totalSessions, r.total_active_min AS totalActiveMin,
r.total_lines_seen AS totalLinesSeen, r.total_words_seen AS totalWordsSeen,
r.total_tokens_seen AS totalTokensSeen, r.total_cards AS totalCards,
r.cards_per_hour AS cardsPerHour, r.words_per_min AS wordsPerMin,
r.lookup_hit_rate AS lookupHitRate
FROM imm_daily_rollups r
JOIN imm_videos v ON v.video_id = r.video_id
WHERE v.anime_id = ?
AND r.rollup_day IN (SELECT rollup_day FROM recent_days)
ORDER BY r.rollup_day DESC, r.video_id DESC
`,
)
.all(animeId, limit, animeId) as unknown as ImmersionSessionRollupRow[];
}
export function getAnimeCoverArt(db: DatabaseSync, animeId: number): MediaArtRow | null {
const resolvedCoverBlob = resolvedCoverBlobExpr('a', 'cab');
return db
.prepare(
`
@@ -920,15 +1024,17 @@ export function getAnimeCoverArt(db: DatabaseSync, animeId: number): MediaArtRow
a.video_id AS videoId,
a.anilist_id AS anilistId,
a.cover_url AS coverUrl,
a.cover_blob AS coverBlob,
${resolvedCoverBlob} AS coverBlob,
a.title_romaji AS titleRomaji,
a.title_english AS titleEnglish,
a.episodes_total AS episodesTotal,
a.fetched_at_ms AS fetchedAtMs
FROM imm_media_art a
JOIN imm_videos v ON v.video_id = a.video_id
LEFT JOIN imm_cover_art_blobs cab ON cab.blob_hash = a.cover_blob_hash
WHERE v.anime_id = ?
AND a.cover_blob IS NOT NULL
AND ${resolvedCoverBlob} IS NOT NULL
ORDER BY a.fetched_at_ms DESC, a.video_id DESC
LIMIT 1
`,
)
@@ -936,20 +1042,22 @@ export function getAnimeCoverArt(db: DatabaseSync, animeId: number): MediaArtRow
}
export function getCoverArt(db: DatabaseSync, videoId: number): MediaArtRow | null {
const resolvedCoverBlob = resolvedCoverBlobExpr('a', 'cab');
return db
.prepare(
`
SELECT
video_id AS videoId,
anilist_id AS anilistId,
cover_url AS coverUrl,
cover_blob AS coverBlob,
title_romaji AS titleRomaji,
title_english AS titleEnglish,
episodes_total AS episodesTotal,
fetched_at_ms AS fetchedAtMs
FROM imm_media_art
WHERE video_id = ?
a.video_id AS videoId,
a.anilist_id AS anilistId,
a.cover_url AS coverUrl,
${resolvedCoverBlob} AS coverBlob,
a.title_romaji AS titleRomaji,
a.title_english AS titleEnglish,
a.episodes_total AS episodesTotal,
a.fetched_at_ms AS fetchedAtMs
FROM imm_media_art a
LEFT JOIN imm_cover_art_blobs cab ON cab.blob_hash = a.cover_blob_hash
WHERE a.video_id = ?
`,
)
.get(videoId) as unknown as MediaArtRow | null;
@@ -991,30 +1099,6 @@ export function getAnimeWords(db: DatabaseSync, animeId: number, limit = 50): An
.all(animeId, limit) as unknown as AnimeWordRow[];
}
export function getAnimeDailyRollups(
db: DatabaseSync,
animeId: number,
limit = 90,
): ImmersionSessionRollupRow[] {
return db
.prepare(
`
SELECT r.rollup_day AS rollupDayOrMonth, r.video_id AS videoId,
r.total_sessions AS totalSessions, r.total_active_min AS totalActiveMin,
r.total_lines_seen AS totalLinesSeen, r.total_words_seen AS totalWordsSeen,
r.total_tokens_seen AS totalTokensSeen, r.total_cards AS totalCards,
r.cards_per_hour AS cardsPerHour, r.words_per_min AS wordsPerMin,
r.lookup_hit_rate AS lookupHitRate
FROM imm_daily_rollups r
JOIN imm_videos v ON v.video_id = r.video_id
WHERE v.anime_id = ?
ORDER BY r.rollup_day DESC
LIMIT ?
`,
)
.all(animeId, limit) as unknown as ImmersionSessionRollupRow[];
}
export function getEpisodesPerDay(db: DatabaseSync, limit = 90): EpisodesPerDayRow[] {
return db
.prepare(
@@ -1203,23 +1287,23 @@ export function getEpisodeSessions(db: DatabaseSync, videoId: number): SessionSu
return db
.prepare(
`
${ACTIVE_SESSION_METRICS_CTE}
SELECT
s.session_id AS sessionId, s.video_id AS videoId,
v.canonical_title AS canonicalTitle,
s.started_at_ms AS startedAtMs, s.ended_at_ms AS endedAtMs,
COALESCE(MAX(t.total_watched_ms), 0) AS totalWatchedMs,
COALESCE(MAX(t.active_watched_ms), 0) AS activeWatchedMs,
COALESCE(MAX(t.lines_seen), 0) AS linesSeen,
COALESCE(MAX(t.words_seen), 0) AS wordsSeen,
COALESCE(MAX(t.tokens_seen), 0) AS tokensSeen,
COALESCE(MAX(t.cards_mined), 0) AS cardsMined,
COALESCE(MAX(t.lookup_count), 0) AS lookupCount,
COALESCE(MAX(t.lookup_hits), 0) AS lookupHits
COALESCE(asm.totalWatchedMs, s.total_watched_ms, 0) AS totalWatchedMs,
COALESCE(asm.activeWatchedMs, s.active_watched_ms, 0) AS activeWatchedMs,
COALESCE(asm.linesSeen, s.lines_seen, 0) AS linesSeen,
COALESCE(asm.wordsSeen, s.words_seen, 0) AS wordsSeen,
COALESCE(asm.tokensSeen, s.tokens_seen, 0) AS tokensSeen,
COALESCE(asm.cardsMined, s.cards_mined, 0) AS cardsMined,
COALESCE(asm.lookupCount, s.lookup_count, 0) AS lookupCount,
COALESCE(asm.lookupHits, s.lookup_hits, 0) AS lookupHits
FROM imm_sessions s
JOIN imm_videos v ON v.video_id = s.video_id
LEFT JOIN imm_session_telemetry t ON t.session_id = s.session_id
LEFT JOIN active_session_metrics asm ON asm.sessionId = s.session_id
WHERE s.video_id = ?
GROUP BY s.session_id
ORDER BY s.started_at_ms DESC
`,
)
@@ -1271,24 +1355,52 @@ export function upsertCoverArt(
art: {
anilistId: number | null;
coverUrl: string | null;
coverBlob: Buffer | null;
coverBlob: ArrayBuffer | Uint8Array | Buffer | null;
titleRomaji: string | null;
titleEnglish: string | null;
episodesTotal: number | null;
},
): void {
const existing = db
.prepare(
`
SELECT cover_blob_hash AS coverBlobHash
FROM imm_media_art
WHERE video_id = ?
`,
)
.get(videoId) as { coverBlobHash: string | null } | undefined;
const sharedCoverBlobHash = findSharedCoverBlobHash(db, videoId, art.anilistId, art.coverUrl);
const nowMs = Date.now();
const coverBlob = normalizeCoverBlobBytes(art.coverBlob);
let coverBlobHash = sharedCoverBlobHash ?? existing?.coverBlobHash ?? null;
if (!coverBlobHash && coverBlob && coverBlob.length > 0) {
coverBlobHash = createHash('sha256').update(coverBlob).digest('hex');
}
if (coverBlobHash && coverBlob && coverBlob.length > 0 && !sharedCoverBlobHash) {
db.prepare(
`
INSERT INTO imm_cover_art_blobs (blob_hash, cover_blob, CREATED_DATE, LAST_UPDATE_DATE)
VALUES (?, ?, ?, ?)
ON CONFLICT(blob_hash) DO UPDATE SET
LAST_UPDATE_DATE = excluded.LAST_UPDATE_DATE
`,
).run(coverBlobHash, coverBlob, nowMs, nowMs);
}
db.prepare(
`
INSERT INTO imm_media_art (
video_id, anilist_id, cover_url, cover_blob,
video_id, anilist_id, cover_url, cover_blob, cover_blob_hash,
title_romaji, title_english, episodes_total,
fetched_at_ms, CREATED_DATE, LAST_UPDATE_DATE
) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
ON CONFLICT(video_id) DO UPDATE SET
anilist_id = excluded.anilist_id,
cover_url = excluded.cover_url,
cover_blob = excluded.cover_blob,
cover_blob_hash = excluded.cover_blob_hash,
title_romaji = excluded.title_romaji,
title_english = excluded.title_english,
episodes_total = excluded.episodes_total,
@@ -1299,7 +1411,8 @@ export function upsertCoverArt(
videoId,
art.anilistId,
art.coverUrl,
art.coverBlob,
coverBlobHash ? buildCoverBlobReference(coverBlobHash) : coverBlob,
coverBlobHash,
art.titleRomaji,
art.titleEnglish,
art.episodesTotal,
@@ -1307,6 +1420,10 @@ export function upsertCoverArt(
nowMs,
nowMs,
);
if (existing?.coverBlobHash !== coverBlobHash) {
cleanupUnusedCoverArtBlobHash(db, existing?.coverBlobHash ?? null);
}
}
export function updateAnimeAnilistInfo(
@@ -1378,6 +1495,15 @@ export function deleteSession(db: DatabaseSync, sessionId: number): void {
}
export function deleteVideo(db: DatabaseSync, videoId: number): void {
const artRow = db
.prepare(
`
SELECT cover_blob_hash AS coverBlobHash
FROM imm_media_art
WHERE video_id = ?
`,
)
.get(videoId) as { coverBlobHash: string | null } | undefined;
const sessions = db
.prepare('SELECT session_id FROM imm_sessions WHERE video_id = ?')
.all(videoId) as Array<{ session_id: number }>;
@@ -1388,5 +1514,6 @@ export function deleteVideo(db: DatabaseSync, videoId: number): void {
db.prepare('DELETE FROM imm_daily_rollups WHERE video_id = ?').run(videoId);
db.prepare('DELETE FROM imm_monthly_rollups WHERE video_id = ?').run(videoId);
db.prepare('DELETE FROM imm_media_art WHERE video_id = ?').run(videoId);
cleanupUnusedCoverArtBlobHash(db, artRow?.coverBlobHash ?? null);
db.prepare('DELETE FROM imm_videos WHERE video_id = ?').run(videoId);
}

View File

@@ -39,8 +39,39 @@ export function finalizeSessionRecord(
SET
ended_at_ms = ?,
status = ?,
total_watched_ms = ?,
active_watched_ms = ?,
lines_seen = ?,
words_seen = ?,
tokens_seen = ?,
cards_mined = ?,
lookup_count = ?,
lookup_hits = ?,
pause_count = ?,
pause_ms = ?,
seek_forward_count = ?,
seek_backward_count = ?,
media_buffer_events = ?,
LAST_UPDATE_DATE = ?
WHERE session_id = ?
`,
).run(endedAtMs, SESSION_STATUS_ENDED, Date.now(), sessionState.sessionId);
).run(
endedAtMs,
SESSION_STATUS_ENDED,
sessionState.totalWatchedMs,
sessionState.activeWatchedMs,
sessionState.linesSeen,
sessionState.wordsSeen,
sessionState.tokensSeen,
sessionState.cardsMined,
sessionState.lookupCount,
sessionState.lookupHits,
sessionState.pauseCount,
sessionState.pauseMs,
sessionState.seekForwardCount,
sessionState.seekBackwardCount,
sessionState.mediaBufferEvents,
Date.now(),
sessionState.sessionId,
);
}

View File

@@ -9,6 +9,8 @@ import {
createTrackerPreparedStatements,
ensureSchema,
executeQueuedWrite,
normalizeCoverBlobBytes,
parseCoverBlobReference,
getOrCreateAnimeRecord,
getOrCreateVideoRecord,
linkVideoToAnimeRecord,
@@ -74,6 +76,7 @@ test('ensureSchema creates immersion core tables', () => {
assert.ok(tableNames.has('imm_word_line_occurrences'));
assert.ok(tableNames.has('imm_kanji_line_occurrences'));
assert.ok(tableNames.has('imm_rollup_state'));
assert.ok(tableNames.has('imm_cover_art_blobs'));
const videoColumns = new Set(
(
@@ -92,6 +95,15 @@ test('ensureSchema creates immersion core tables', () => {
assert.ok(videoColumns.has('parser_confidence'));
assert.ok(videoColumns.has('parse_metadata_json'));
const mediaArtColumns = new Set(
(
db.prepare('PRAGMA table_info(imm_media_art)').all() as Array<{
name: string;
}>
).map((row) => row.name),
);
assert.ok(mediaArtColumns.has('cover_blob_hash'));
const rollupStateRow = db
.prepare('SELECT state_value FROM imm_rollup_state WHERE state_key = ?')
.get('last_rollup_sample_ms') as {
@@ -105,6 +117,33 @@ test('ensureSchema creates immersion core tables', () => {
}
});
test('ensureSchema creates large-history performance indexes', () => {
const dbPath = makeDbPath();
const db = new Database(dbPath);
try {
ensureSchema(db);
const indexNames = new Set(
(
db.prepare(`SELECT name FROM sqlite_master WHERE type = 'index' AND name LIKE 'idx_%'`).all() as Array<{
name: string;
}>
).map((row) => row.name),
);
assert.ok(indexNames.has('idx_telemetry_sample_ms'));
assert.ok(indexNames.has('idx_sessions_started_at'));
assert.ok(indexNames.has('idx_sessions_ended_at'));
assert.ok(indexNames.has('idx_words_frequency'));
assert.ok(indexNames.has('idx_kanji_frequency'));
assert.ok(indexNames.has('idx_media_art_anilist_id'));
assert.ok(indexNames.has('idx_media_art_cover_url'));
} finally {
db.close();
cleanupDbPath(dbPath);
}
});
test('ensureSchema migrates legacy videos and backfills anime metadata from filenames', () => {
const dbPath = makeDbPath();
const db = new Database(dbPath);
@@ -440,6 +479,67 @@ test('ensureSchema adds subtitle-line occurrence tables to schema version 6 data
}
});
test('ensureSchema migrates legacy cover art blobs into the shared blob store', () => {
const dbPath = makeDbPath();
const db = new Database(dbPath);
try {
ensureSchema(db);
db.prepare('UPDATE imm_schema_version SET schema_version = 12').run();
const videoId = getOrCreateVideoRecord(db, 'local:/tmp/legacy-cover-art.mkv', {
canonicalTitle: 'Legacy Cover Art',
sourcePath: '/tmp/legacy-cover-art.mkv',
sourceUrl: null,
sourceType: SOURCE_TYPE_LOCAL,
});
const legacyBlob = Uint8Array.from([0xde, 0xad, 0xbe, 0xef]);
db.prepare(
`
INSERT INTO imm_media_art (
video_id,
anilist_id,
cover_url,
cover_blob,
cover_blob_hash,
title_romaji,
title_english,
episodes_total,
fetched_at_ms,
CREATED_DATE,
LAST_UPDATE_DATE
) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
`,
).run(videoId, null, null, legacyBlob, null, null, null, null, 1, 1, 1);
assert.doesNotThrow(() => ensureSchema(db));
const mediaArtRow = db
.prepare('SELECT cover_blob AS coverBlob, cover_blob_hash AS coverBlobHash FROM imm_media_art')
.get() as {
coverBlob: ArrayBuffer | Uint8Array | Buffer | null;
coverBlobHash: string | null;
} | null;
assert.ok(mediaArtRow);
assert.ok(mediaArtRow?.coverBlobHash);
assert.equal(parseCoverBlobReference(normalizeCoverBlobBytes(mediaArtRow?.coverBlob)), mediaArtRow?.coverBlobHash);
const sharedBlobRow = db
.prepare('SELECT cover_blob AS coverBlob FROM imm_cover_art_blobs WHERE blob_hash = ?')
.get(mediaArtRow?.coverBlobHash) as {
coverBlob: ArrayBuffer | Uint8Array | Buffer;
} | null;
assert.ok(sharedBlobRow);
assert.equal(normalizeCoverBlobBytes(sharedBlobRow?.coverBlob)?.toString('hex'), 'deadbeef');
} finally {
db.close();
cleanupDbPath(dbPath);
}
});
test('anime rows are reused by normalized parsed title and upgraded with AniList metadata', () => {
const dbPath = makeDbPath();
const db = new Database(dbPath);

View File

@@ -1,3 +1,4 @@
import { createHash } from 'node:crypto';
import { parseMediaInfo } from '../../../jimaku/utils';
import type { DatabaseSync } from './sqlite';
import { SCHEMA_VERSION } from './types';
@@ -37,6 +38,92 @@ export interface VideoAnimeLinkInput {
parseMetadataJson: string | null;
}
const COVER_BLOB_REFERENCE_PREFIX = '__subminer_cover_blob_ref__:';
export type CoverBlobBytes = ArrayBuffer | Uint8Array | Buffer;
export function buildCoverBlobReference(hash: string): Buffer {
return Buffer.from(`${COVER_BLOB_REFERENCE_PREFIX}${hash}`, 'utf8');
}
export function normalizeCoverBlobBytes(blob: CoverBlobBytes | null | undefined): Buffer | null {
if (!blob) {
return null;
}
if (Buffer.isBuffer(blob)) {
return blob;
}
if (blob instanceof ArrayBuffer) {
return Buffer.from(blob);
}
return Buffer.from(blob.buffer, blob.byteOffset, blob.byteLength);
}
export function parseCoverBlobReference(blob: CoverBlobBytes | null | undefined): string | null {
const normalizedBlob = normalizeCoverBlobBytes(blob);
if (!normalizedBlob || normalizedBlob.length === 0) {
return null;
}
const value = normalizedBlob.toString('utf8');
if (!value.startsWith(COVER_BLOB_REFERENCE_PREFIX)) {
return null;
}
const hash = value.slice(COVER_BLOB_REFERENCE_PREFIX.length);
return hash.length > 0 ? hash : null;
}
function deduplicateExistingCoverArtRows(db: DatabaseSync): void {
const rows = db
.prepare(
`
SELECT video_id, cover_blob, cover_blob_hash
FROM imm_media_art
WHERE cover_blob IS NOT NULL
`,
)
.all() as Array<{
video_id: number;
cover_blob: CoverBlobBytes | null;
cover_blob_hash: string | null;
}>;
if (rows.length === 0) {
return;
}
const nowMs = Date.now();
const upsertBlobStmt = db.prepare(`
INSERT INTO imm_cover_art_blobs (blob_hash, cover_blob, CREATED_DATE, LAST_UPDATE_DATE)
VALUES (?, ?, ?, ?)
ON CONFLICT(blob_hash) DO UPDATE SET
LAST_UPDATE_DATE = excluded.LAST_UPDATE_DATE
`);
const updateMediaStmt = db.prepare(`
UPDATE imm_media_art
SET cover_blob = ?, cover_blob_hash = ?, LAST_UPDATE_DATE = ?
WHERE video_id = ?
`);
for (const row of rows) {
const coverBlob = normalizeCoverBlobBytes(row.cover_blob);
if (!coverBlob || coverBlob.length === 0) {
continue;
}
const refHash = parseCoverBlobReference(coverBlob);
if (refHash) {
if (row.cover_blob_hash !== refHash) {
updateMediaStmt.run(coverBlob, refHash, nowMs, row.video_id);
}
continue;
}
const hash = createHash('sha256').update(coverBlob).digest('hex');
upsertBlobStmt.run(hash, coverBlob, nowMs, nowMs);
updateMediaStmt.run(buildCoverBlobReference(hash), hash, nowMs, row.video_id);
}
}
function hasColumn(db: DatabaseSync, tableName: string, columnName: string): boolean {
return db
.prepare(`PRAGMA table_info(${tableName})`)
@@ -147,6 +234,102 @@ function parseLegacyAnimeBackfillCandidate(
};
}
function ensureLifetimeSummaryTables(db: DatabaseSync): void {
const nowMs = Date.now();
db.exec(`
CREATE TABLE IF NOT EXISTS imm_lifetime_global(
global_id INTEGER PRIMARY KEY CHECK(global_id = 1),
total_sessions INTEGER NOT NULL DEFAULT 0,
total_active_ms INTEGER NOT NULL DEFAULT 0,
total_cards INTEGER NOT NULL DEFAULT 0,
active_days INTEGER NOT NULL DEFAULT 0,
episodes_started INTEGER NOT NULL DEFAULT 0,
episodes_completed INTEGER NOT NULL DEFAULT 0,
anime_completed INTEGER NOT NULL DEFAULT 0,
last_rebuilt_ms INTEGER,
CREATED_DATE INTEGER,
LAST_UPDATE_DATE INTEGER
)
`);
db.exec(`
INSERT INTO imm_lifetime_global(
global_id,
total_sessions,
total_active_ms,
total_cards,
active_days,
episodes_started,
episodes_completed,
anime_completed,
last_rebuilt_ms,
CREATED_DATE,
LAST_UPDATE_DATE
)
SELECT
1,
0,
0,
0,
0,
0,
0,
0,
NULL,
${nowMs},
${nowMs}
WHERE NOT EXISTS (SELECT 1 FROM imm_lifetime_global LIMIT 1)
`);
db.exec(`
CREATE TABLE IF NOT EXISTS imm_lifetime_anime(
anime_id INTEGER PRIMARY KEY,
total_sessions INTEGER NOT NULL DEFAULT 0,
total_active_ms INTEGER NOT NULL DEFAULT 0,
total_cards INTEGER NOT NULL DEFAULT 0,
total_words_seen INTEGER NOT NULL DEFAULT 0,
total_lines_seen INTEGER NOT NULL DEFAULT 0,
total_tokens_seen INTEGER NOT NULL DEFAULT 0,
episodes_started INTEGER NOT NULL DEFAULT 0,
episodes_completed INTEGER NOT NULL DEFAULT 0,
first_watched_ms INTEGER,
last_watched_ms INTEGER,
CREATED_DATE INTEGER,
LAST_UPDATE_DATE INTEGER,
FOREIGN KEY(anime_id) REFERENCES imm_anime(anime_id) ON DELETE CASCADE
)
`);
db.exec(`
CREATE TABLE IF NOT EXISTS imm_lifetime_media(
video_id INTEGER PRIMARY KEY,
total_sessions INTEGER NOT NULL DEFAULT 0,
total_active_ms INTEGER NOT NULL DEFAULT 0,
total_cards INTEGER NOT NULL DEFAULT 0,
total_words_seen INTEGER NOT NULL DEFAULT 0,
total_lines_seen INTEGER NOT NULL DEFAULT 0,
total_tokens_seen INTEGER NOT NULL DEFAULT 0,
completed INTEGER NOT NULL DEFAULT 0,
first_watched_ms INTEGER,
last_watched_ms INTEGER,
CREATED_DATE INTEGER,
LAST_UPDATE_DATE INTEGER,
FOREIGN KEY(video_id) REFERENCES imm_videos(video_id) ON DELETE CASCADE
)
`);
db.exec(`
CREATE TABLE IF NOT EXISTS imm_lifetime_applied_sessions(
session_id INTEGER PRIMARY KEY,
applied_at_ms INTEGER NOT NULL,
CREATED_DATE INTEGER,
LAST_UPDATE_DATE INTEGER,
FOREIGN KEY(session_id) REFERENCES imm_sessions(session_id) ON DELETE CASCADE
)
`);
}
export function getOrCreateAnimeRecord(db: DatabaseSync, input: AnimeRecordInput): number {
const normalizedTitleKey = normalizeAnimeIdentityKey(input.parsedTitle);
if (!normalizedTitleKey) {
@@ -328,6 +511,7 @@ export function ensureSchema(db: DatabaseSync): void {
.prepare('SELECT schema_version FROM imm_schema_version ORDER BY schema_version DESC LIMIT 1')
.get() as { schema_version: number } | null;
if (currentVersion?.schema_version === SCHEMA_VERSION) {
ensureLifetimeSummaryTables(db);
return;
}
@@ -385,6 +569,19 @@ export function ensureSchema(db: DatabaseSync): void {
status INTEGER NOT NULL,
locale_id INTEGER, target_lang_id INTEGER,
difficulty_tier INTEGER, subtitle_mode INTEGER,
total_watched_ms INTEGER NOT NULL DEFAULT 0,
active_watched_ms INTEGER NOT NULL DEFAULT 0,
lines_seen INTEGER NOT NULL DEFAULT 0,
words_seen INTEGER NOT NULL DEFAULT 0,
tokens_seen INTEGER NOT NULL DEFAULT 0,
cards_mined INTEGER NOT NULL DEFAULT 0,
lookup_count INTEGER NOT NULL DEFAULT 0,
lookup_hits INTEGER NOT NULL DEFAULT 0,
pause_count INTEGER NOT NULL DEFAULT 0,
pause_ms INTEGER NOT NULL DEFAULT 0,
seek_forward_count INTEGER NOT NULL DEFAULT 0,
seek_backward_count INTEGER NOT NULL DEFAULT 0,
media_buffer_events INTEGER NOT NULL DEFAULT 0,
CREATED_DATE INTEGER,
LAST_UPDATE_DATE INTEGER,
FOREIGN KEY(video_id) REFERENCES imm_videos(video_id)
@@ -536,6 +733,7 @@ export function ensureSchema(db: DatabaseSync): void {
anilist_id INTEGER,
cover_url TEXT,
cover_blob BLOB,
cover_blob_hash TEXT,
title_romaji TEXT,
title_english TEXT,
episodes_total INTEGER,
@@ -545,6 +743,14 @@ export function ensureSchema(db: DatabaseSync): void {
FOREIGN KEY(video_id) REFERENCES imm_videos(video_id) ON DELETE CASCADE
);
`);
db.exec(`
CREATE TABLE IF NOT EXISTS imm_cover_art_blobs(
blob_hash TEXT PRIMARY KEY,
cover_blob BLOB NOT NULL,
CREATED_DATE INTEGER,
LAST_UPDATE_DATE INTEGER
);
`);
if (currentVersion?.schema_version === 1) {
addColumnIfMissing(db, 'imm_videos', 'CREATED_DATE');
@@ -681,6 +887,134 @@ export function ensureSchema(db: DatabaseSync): void {
addColumnIfMissing(db, 'imm_subtitle_lines', 'secondary_text', 'TEXT');
}
if (currentVersion?.schema_version && currentVersion.schema_version < 11) {
addColumnIfMissing(db, 'imm_sessions', 'total_watched_ms', 'INTEGER NOT NULL DEFAULT 0');
addColumnIfMissing(db, 'imm_sessions', 'active_watched_ms', 'INTEGER NOT NULL DEFAULT 0');
addColumnIfMissing(db, 'imm_sessions', 'lines_seen', 'INTEGER NOT NULL DEFAULT 0');
addColumnIfMissing(db, 'imm_sessions', 'words_seen', 'INTEGER NOT NULL DEFAULT 0');
addColumnIfMissing(db, 'imm_sessions', 'tokens_seen', 'INTEGER NOT NULL DEFAULT 0');
addColumnIfMissing(db, 'imm_sessions', 'cards_mined', 'INTEGER NOT NULL DEFAULT 0');
addColumnIfMissing(db, 'imm_sessions', 'lookup_count', 'INTEGER NOT NULL DEFAULT 0');
addColumnIfMissing(db, 'imm_sessions', 'lookup_hits', 'INTEGER NOT NULL DEFAULT 0');
addColumnIfMissing(db, 'imm_sessions', 'pause_count', 'INTEGER NOT NULL DEFAULT 0');
addColumnIfMissing(db, 'imm_sessions', 'pause_ms', 'INTEGER NOT NULL DEFAULT 0');
addColumnIfMissing(db, 'imm_sessions', 'seek_forward_count', 'INTEGER NOT NULL DEFAULT 0');
addColumnIfMissing(db, 'imm_sessions', 'seek_backward_count', 'INTEGER NOT NULL DEFAULT 0');
addColumnIfMissing(db, 'imm_sessions', 'media_buffer_events', 'INTEGER NOT NULL DEFAULT 0');
db.exec(`
UPDATE imm_sessions
SET
total_watched_ms = COALESCE((
SELECT t.total_watched_ms
FROM imm_session_telemetry t
WHERE t.session_id = imm_sessions.session_id
ORDER BY t.sample_ms DESC, t.telemetry_id DESC
LIMIT 1
), total_watched_ms),
active_watched_ms = COALESCE((
SELECT t.active_watched_ms
FROM imm_session_telemetry t
WHERE t.session_id = imm_sessions.session_id
ORDER BY t.sample_ms DESC, t.telemetry_id DESC
LIMIT 1
), active_watched_ms),
lines_seen = COALESCE((
SELECT t.lines_seen
FROM imm_session_telemetry t
WHERE t.session_id = imm_sessions.session_id
ORDER BY t.sample_ms DESC, t.telemetry_id DESC
LIMIT 1
), lines_seen),
words_seen = COALESCE((
SELECT t.words_seen
FROM imm_session_telemetry t
WHERE t.session_id = imm_sessions.session_id
ORDER BY t.sample_ms DESC, t.telemetry_id DESC
LIMIT 1
), words_seen),
tokens_seen = COALESCE((
SELECT t.tokens_seen
FROM imm_session_telemetry t
WHERE t.session_id = imm_sessions.session_id
ORDER BY t.sample_ms DESC, t.telemetry_id DESC
LIMIT 1
), tokens_seen),
cards_mined = COALESCE((
SELECT t.cards_mined
FROM imm_session_telemetry t
WHERE t.session_id = imm_sessions.session_id
ORDER BY t.sample_ms DESC, t.telemetry_id DESC
LIMIT 1
), cards_mined),
lookup_count = COALESCE((
SELECT t.lookup_count
FROM imm_session_telemetry t
WHERE t.session_id = imm_sessions.session_id
ORDER BY t.sample_ms DESC, t.telemetry_id DESC
LIMIT 1
), lookup_count),
lookup_hits = COALESCE((
SELECT t.lookup_hits
FROM imm_session_telemetry t
WHERE t.session_id = imm_sessions.session_id
ORDER BY t.sample_ms DESC, t.telemetry_id DESC
LIMIT 1
), lookup_hits),
pause_count = COALESCE((
SELECT t.pause_count
FROM imm_session_telemetry t
WHERE t.session_id = imm_sessions.session_id
ORDER BY t.sample_ms DESC, t.telemetry_id DESC
LIMIT 1
), pause_count),
pause_ms = COALESCE((
SELECT t.pause_ms
FROM imm_session_telemetry t
WHERE t.session_id = imm_sessions.session_id
ORDER BY t.sample_ms DESC, t.telemetry_id DESC
LIMIT 1
), pause_ms),
seek_forward_count = COALESCE((
SELECT t.seek_forward_count
FROM imm_session_telemetry t
WHERE t.session_id = imm_sessions.session_id
ORDER BY t.sample_ms DESC, t.telemetry_id DESC
LIMIT 1
), seek_forward_count),
seek_backward_count = COALESCE((
SELECT t.seek_backward_count
FROM imm_session_telemetry t
WHERE t.session_id = imm_sessions.session_id
ORDER BY t.sample_ms DESC, t.telemetry_id DESC
LIMIT 1
), seek_backward_count),
media_buffer_events = COALESCE((
SELECT t.media_buffer_events
FROM imm_session_telemetry t
WHERE t.session_id = imm_sessions.session_id
ORDER BY t.sample_ms DESC, t.telemetry_id DESC
LIMIT 1
), media_buffer_events)
WHERE ended_at_ms IS NOT NULL
`);
}
if (currentVersion?.schema_version && currentVersion.schema_version < 13) {
addColumnIfMissing(db, 'imm_media_art', 'cover_blob_hash', 'TEXT');
db.exec(`
CREATE TABLE IF NOT EXISTS imm_cover_art_blobs(
blob_hash TEXT PRIMARY KEY,
cover_blob BLOB NOT NULL,
CREATED_DATE INTEGER,
LAST_UPDATE_DATE INTEGER
)
`);
deduplicateExistingCoverArtRows(db);
}
ensureLifetimeSummaryTables(db);
db.exec(`
CREATE INDEX IF NOT EXISTS idx_anime_normalized_title
ON imm_anime(normalized_title_key)
@@ -701,10 +1035,22 @@ export function ensureSchema(db: DatabaseSync): void {
CREATE INDEX IF NOT EXISTS idx_sessions_status_started
ON imm_sessions(status, started_at_ms DESC)
`);
db.exec(`
CREATE INDEX IF NOT EXISTS idx_sessions_started_at
ON imm_sessions(started_at_ms DESC)
`);
db.exec(`
CREATE INDEX IF NOT EXISTS idx_sessions_ended_at
ON imm_sessions(ended_at_ms DESC)
`);
db.exec(`
CREATE INDEX IF NOT EXISTS idx_telemetry_session_sample
ON imm_session_telemetry(session_id, sample_ms DESC)
`);
db.exec(`
CREATE INDEX IF NOT EXISTS idx_telemetry_sample_ms
ON imm_session_telemetry(sample_ms DESC)
`);
db.exec(`
CREATE INDEX IF NOT EXISTS idx_events_session_ts
ON imm_session_events(session_id, ts_ms DESC)
@@ -725,10 +1071,18 @@ export function ensureSchema(db: DatabaseSync): void {
CREATE INDEX IF NOT EXISTS idx_words_headword_word_reading
ON imm_words(headword, word, reading)
`);
db.exec(`
CREATE INDEX IF NOT EXISTS idx_words_frequency
ON imm_words(frequency DESC)
`);
db.exec(`
CREATE INDEX IF NOT EXISTS idx_kanji_kanji
ON imm_kanji(kanji)
`);
db.exec(`
CREATE INDEX IF NOT EXISTS idx_kanji_frequency
ON imm_kanji(frequency DESC)
`);
db.exec(`
CREATE INDEX IF NOT EXISTS idx_subtitle_lines_session_line
ON imm_subtitle_lines(session_id, line_index)
@@ -749,6 +1103,18 @@ export function ensureSchema(db: DatabaseSync): void {
CREATE INDEX IF NOT EXISTS idx_kanji_line_occurrences_kanji
ON imm_kanji_line_occurrences(kanji_id, line_id)
`);
db.exec(`
CREATE INDEX IF NOT EXISTS idx_media_art_cover_blob_hash
ON imm_media_art(cover_blob_hash)
`);
db.exec(`
CREATE INDEX IF NOT EXISTS idx_media_art_anilist_id
ON imm_media_art(anilist_id)
`);
db.exec(`
CREATE INDEX IF NOT EXISTS idx_media_art_cover_url
ON imm_media_art(cover_url)
`);
if (currentVersion?.schema_version && currentVersion.schema_version < SCHEMA_VERSION) {
db.exec('DELETE FROM imm_daily_rollups');

View File

@@ -1,4 +1,4 @@
export const SCHEMA_VERSION = 10;
export const SCHEMA_VERSION = 13;
export const DEFAULT_QUEUE_CAP = 1_000;
export const DEFAULT_BATCH_SIZE = 25;
export const DEFAULT_FLUSH_INTERVAL_MS = 500;
@@ -7,6 +7,7 @@ const ONE_WEEK_MS = 7 * 24 * 60 * 60 * 1000;
export const DEFAULT_EVENTS_RETENTION_MS = ONE_WEEK_MS;
export const DEFAULT_VACUUM_INTERVAL_MS = ONE_WEEK_MS;
export const DEFAULT_TELEMETRY_RETENTION_MS = 30 * 24 * 60 * 60 * 1000;
export const DEFAULT_SESSIONS_RETENTION_MS = 30 * 24 * 60 * 60 * 1000;
export const DEFAULT_DAILY_ROLLUP_RETENTION_MS = 365 * 24 * 60 * 60 * 1000;
export const DEFAULT_MONTHLY_ROLLUP_RETENTION_MS = 5 * 365 * 24 * 60 * 60 * 1000;
export const DEFAULT_MAX_PAYLOAD_BYTES = 256;
@@ -43,6 +44,7 @@ export interface ImmersionTrackerPolicy {
retention?: {
eventsDays?: number;
telemetryDays?: number;
sessionsDays?: number;
dailyRollupsDays?: number;
monthlyRollupsDays?: number;
vacuumIntervalDays?: number;
@@ -233,6 +235,54 @@ export interface SessionSummaryQueryRow {
lookupHits: number;
}
/** Aggregated lifetime totals across all recorded immersion sessions. */
export interface LifetimeGlobalRow {
  totalSessions: number;
  // Total active watch time, in milliseconds.
  totalActiveMs: number;
  totalCards: number;
  // Number of distinct days with at least one session.
  activeDays: number;
  episodesStarted: number;
  episodesCompleted: number;
  animeCompleted: number;
  // Timestamp (ms) of the last lifetime-summary rebuild, or null if never rebuilt.
  lastRebuiltMs: number | null;
}
/** Per-anime lifetime aggregate keyed by {@link LifetimeAnimeRow.animeId}. */
export interface LifetimeAnimeRow {
  animeId: number;
  totalSessions: number;
  // Total active watch time for this anime, in milliseconds.
  totalActiveMs: number;
  totalCards: number;
  totalWordsSeen: number;
  totalLinesSeen: number;
  totalTokensSeen: number;
  episodesStarted: number;
  episodesCompleted: number;
  // First/last watch timestamps (ms); null when no sessions have been applied.
  firstWatchedMs: number | null;
  lastWatchedMs: number | null;
}
/** Per-media-file lifetime aggregate keyed by {@link LifetimeMediaRow.videoId}. */
export interface LifetimeMediaRow {
  videoId: number;
  totalSessions: number;
  // Total active watch time for this video, in milliseconds.
  totalActiveMs: number;
  totalCards: number;
  totalWordsSeen: number;
  totalLinesSeen: number;
  totalTokensSeen: number;
  // Completion flag stored as an integer (SQLite has no boolean column type).
  completed: number;
  // First/last watch timestamps (ms); null when no sessions have been applied.
  firstWatchedMs: number | null;
  lastWatchedMs: number | null;
}
/** Record of a session already folded into the lifetime summaries. */
export interface AppliedSessionRow {
  sessionId: number;
  // Timestamp (ms) when the session was applied to the rollups.
  appliedAtMs: number;
}
/** Result of a full lifetime-summary rebuild pass. */
export interface LifetimeRebuildSummary {
  // Number of sessions folded into the summaries during the rebuild.
  appliedSessions: number;
  // Timestamp (ms) when the rebuild finished.
  rebuiltAtMs: number;
}
export interface VocabularyStatsRow {
wordId: number;
headword: string;

View File

@@ -290,6 +290,13 @@ test('registerIpcHandlers returns empty stats overview shape without a tracker',
hints: {
totalSessions: 0,
activeSessions: 0,
episodesToday: 0,
activeAnimeCount: 0,
totalCards: 0,
totalActiveMin: 0,
activeDays: 0,
totalEpisodesWatched: 0,
totalAnimeCompleted: 0,
},
});
});
@@ -318,6 +325,11 @@ test('registerIpcHandlers validates and clamps stats request limits', async () =
activeSessions: 0,
episodesToday: 0,
activeAnimeCount: 0,
totalCards: 0,
totalActiveMin: 0,
activeDays: 0,
totalEpisodesWatched: 0,
totalAnimeCompleted: 0,
}),
getSessionTimeline: async (sessionId: number, limit = 0) => {
calls.push(['timeline', limit, sessionId]);

View File

@@ -78,6 +78,11 @@ export interface IpcServiceDeps {
activeSessions: number;
episodesToday: number;
activeAnimeCount: number;
totalActiveMin: number;
totalCards: number;
activeDays: number;
totalEpisodesWatched: number;
totalAnimeCompleted: number;
}>;
getSessionTimeline: (sessionId: number, limit?: number) => Promise<unknown>;
getSessionEvents: (sessionId: number, limit?: number) => Promise<unknown>;
@@ -457,6 +462,13 @@ export function registerIpcHandlers(deps: IpcServiceDeps, ipc: IpcMainRegistrar
hints: {
totalSessions: 0,
activeSessions: 0,
episodesToday: 0,
activeAnimeCount: 0,
totalActiveMin: 0,
totalCards: 0,
activeDays: 0,
totalEpisodesWatched: 0,
totalAnimeCompleted: 0,
},
};
}

View File

@@ -213,3 +213,49 @@ test('initializeOverlayRuntime re-syncs overlay shortcuts when tracker focus cha
tracker.onWindowFocusChange?.(true);
assert.equal(syncCalls, 1);
});
// Regression guard: when the tracked player window's focus changes while the
// overlay is shown, the runtime must re-run overlay visibility (not just
// re-sync shortcuts) so click-through/window level gets re-asserted.
test('initializeOverlayRuntime refreshes visible overlay when tracker focus changes while overlay is shown', () => {
  let visibilityRefreshCalls = 0;
  // Minimal tracker stub: initializeOverlayRuntime assigns the on* callbacks,
  // which the test then fires manually.
  const tracker = {
    onGeometryChange: null as ((...args: unknown[]) => void) | null,
    onWindowFound: null as ((...args: unknown[]) => void) | null,
    onWindowLost: null as (() => void) | null,
    onWindowFocusChange: null as ((focused: boolean) => void) | null,
    start: () => {},
  };
  initializeOverlayRuntime({
    backendOverride: null,
    createMainWindow: () => {},
    registerGlobalShortcuts: () => {},
    updateVisibleOverlayBounds: () => {},
    // Overlay reports itself visible so the focus handler takes the refresh path.
    isVisibleOverlayVisible: () => true,
    updateVisibleOverlayVisibility: () => {
      visibilityRefreshCalls += 1;
    },
    getOverlayWindows: () => [],
    syncOverlayShortcuts: () => {},
    setWindowTracker: () => {},
    getMpvSocketPath: () => '/tmp/mpv.sock',
    createWindowTracker: () => tracker as never,
    getResolvedConfig: () => ({
      ankiConnect: { enabled: false } as never,
    }),
    getSubtitleTimingTracker: () => null,
    getMpvClient: () => null,
    getRuntimeOptionsManager: () => null,
    setAnkiIntegration: () => {},
    showDesktopNotification: () => {},
    createFieldGroupingCallback: () => async () => ({
      keepNoteId: 1,
      deleteNoteId: 2,
      deleteDuplicate: false,
      cancelled: false,
    }),
    getKnownWordCacheStatePath: () => '/tmp/known-words-cache.json',
  });
  tracker.onWindowFocusChange?.(true);
  // NOTE(review): expects 2 total calls — presumably one during runtime
  // initialization plus one from the focus callback; confirm the init-time call.
  assert.equal(visibilityRefreshCalls, 2);
});

View File

@@ -90,9 +90,6 @@ export function initializeOverlayRuntime(options: {
windowTracker.onGeometryChange = (geometry: WindowGeometry) => {
options.updateVisibleOverlayBounds(geometry);
};
windowTracker.onTargetWindowFocusChange = () => {
options.syncOverlayShortcuts();
};
windowTracker.onWindowFound = (geometry: WindowGeometry) => {
options.updateVisibleOverlayBounds(geometry);
if (options.isVisibleOverlayVisible()) {
@@ -106,6 +103,9 @@ export function initializeOverlayRuntime(options: {
options.syncOverlayShortcuts();
};
windowTracker.onWindowFocusChange = () => {
if (options.isVisibleOverlayVisible()) {
options.updateVisibleOverlayVisibility();
}
options.syncOverlayShortcuts();
};
windowTracker.start();

View File

@@ -200,6 +200,81 @@ test('Windows visible overlay stays click-through and does not steal focus while
assert.ok(!calls.includes('focus'));
});
// A tracked, visible overlay on macOS must show with plain (non-forwarding)
// mouse handling and must never call focus() — focusing would steal input
// from the player window.
test('macOS tracked visible overlay stays visible without passively stealing focus', () => {
  const { window: overlayWindow, calls: recorded } = createMainWindowRecorder();
  const trackingStub: WindowTrackerStub = {
    isTracking: () => true,
    getGeometry: () => ({ x: 0, y: 0, width: 1280, height: 720 }),
  };
  // Small factory so every dependency just records its label.
  const record = (label: string) => (): void => {
    recorded.push(label);
  };
  updateVisibleOverlayVisibility({
    visibleOverlayVisible: true,
    mainWindow: overlayWindow as never,
    windowTracker: trackingStub as never,
    trackerNotReadyWarningShown: false,
    setTrackerNotReadyWarningShown: () => {},
    updateVisibleOverlayBounds: record('update-bounds'),
    ensureOverlayWindowLevel: record('ensure-level'),
    syncPrimaryOverlayWindowLayer: record('sync-layer'),
    enforceOverlayLayerOrder: record('enforce-order'),
    syncOverlayShortcuts: record('sync-shortcuts'),
    isMacOSPlatform: true,
    isWindowsPlatform: false,
  } as never);
  assert.ok(recorded.includes('mouse-ignore:false:plain'));
  assert.ok(recorded.includes('show'));
  assert.ok(!recorded.includes('focus'));
});
// With forceMousePassthrough enabled, the overlay must ignore mouse events
// (forwarding them to the window below) and still show without focusing,
// even on macOS where passthrough is normally off.
test('forced mouse passthrough keeps macOS tracked overlay passive while visible', () => {
  const { window: overlayWindow, calls: recorded } = createMainWindowRecorder();
  const trackingStub: WindowTrackerStub = {
    isTracking: () => true,
    getGeometry: () => ({ x: 0, y: 0, width: 1280, height: 720 }),
  };
  // Small factory so every dependency just records its label.
  const record = (label: string) => (): void => {
    recorded.push(label);
  };
  updateVisibleOverlayVisibility({
    visibleOverlayVisible: true,
    mainWindow: overlayWindow as never,
    windowTracker: trackingStub as never,
    trackerNotReadyWarningShown: false,
    setTrackerNotReadyWarningShown: () => {},
    updateVisibleOverlayBounds: record('update-bounds'),
    ensureOverlayWindowLevel: record('ensure-level'),
    syncPrimaryOverlayWindowLayer: record('sync-layer'),
    enforceOverlayLayerOrder: record('enforce-order'),
    syncOverlayShortcuts: record('sync-shortcuts'),
    isMacOSPlatform: true,
    isWindowsPlatform: false,
    forceMousePassthrough: true,
  } as never);
  assert.ok(recorded.includes('mouse-ignore:true:forward'));
  assert.ok(recorded.includes('show'));
  assert.ok(!recorded.includes('focus'));
});
test('Windows keeps visible overlay hidden while tracker is not ready', () => {
const { window, calls } = createMainWindowRecorder();
let trackerWarning = false;

View File

@@ -4,6 +4,7 @@ import { WindowGeometry } from '../../types';
export function updateVisibleOverlayVisibility(args: {
visibleOverlayVisible: boolean;
forceMousePassthrough?: boolean;
mainWindow: BrowserWindow | null;
windowTracker: BaseWindowTracker | null;
trackerNotReadyWarningShown: boolean;
@@ -25,14 +26,15 @@ export function updateVisibleOverlayVisibility(args: {
const mainWindow = args.mainWindow;
const showPassiveVisibleOverlay = (): void => {
if (args.isWindowsPlatform) {
const forceMousePassthrough = args.forceMousePassthrough === true;
if (args.isWindowsPlatform || forceMousePassthrough) {
mainWindow.setIgnoreMouseEvents(true, { forward: true });
} else {
mainWindow.setIgnoreMouseEvents(false);
}
args.ensureOverlayWindowLevel(mainWindow);
mainWindow.show();
if (!args.isWindowsPlatform) {
if (!args.isWindowsPlatform && !args.isMacOSPlatform && !forceMousePassthrough) {
mainWindow.focus();
}
};

View File

@@ -46,6 +46,7 @@ export function ensureOverlayWindowLevel(window: BrowserWindow): void {
window.setAlwaysOnTop(true, 'screen-saver', 1);
window.setVisibleOnAllWorkspaces(true, { visibleOnFullScreen: true });
window.setFullScreenable(false);
window.moveTop();
return;
}
if (process.platform === 'win32') {

View File

@@ -1,9 +1,12 @@
import type { BrowserWindowConstructorOptions } from 'electron';
import type { BrowserWindow, BrowserWindowConstructorOptions } from 'electron';
import type { WindowGeometry } from '../../types';
const DEFAULT_STATS_WINDOW_WIDTH = 900;
const DEFAULT_STATS_WINDOW_HEIGHT = 700;
type StatsWindowLevelController = Pick<BrowserWindow, 'setAlwaysOnTop' | 'moveTop'> &
Partial<Pick<BrowserWindow, 'setVisibleOnAllWorkspaces' | 'setFullScreenable'>>;
function isBareToggleKeyInput(input: Electron.Input, toggleKey: string): boolean {
return (
input.type === 'keyDown' &&
@@ -51,6 +54,28 @@ export function buildStatsWindowOptions(options: {
};
}
/**
 * Raise the stats window above the overlay layer.
 *
 * The overlay windows sit at `screen-saver` relative level 1, so the stats
 * window is promoted to relative level 2 on macOS and Windows. On other
 * platforms only a plain always-on-top plus moveTop is applied.
 *
 * @param window - Window (or stub) exposing the level-control methods.
 * @param platform - Defaults to the current `process.platform`.
 */
export function promoteStatsWindowLevel(
  window: StatsWindowLevelController,
  platform: NodeJS.Platform = process.platform,
): void {
  switch (platform) {
    case 'darwin':
      window.setAlwaysOnTop(true, 'screen-saver', 2);
      // Keep the stats window reachable across Spaces and fullscreen video.
      window.setVisibleOnAllWorkspaces?.(true, { visibleOnFullScreen: true });
      window.setFullScreenable?.(false);
      window.moveTop();
      break;
    case 'win32':
      window.setAlwaysOnTop(true, 'screen-saver', 2);
      window.moveTop();
      break;
    default:
      window.setAlwaysOnTop(true);
      window.moveTop();
  }
}
export function buildStatsWindowLoadFileOptions(apiBaseUrl?: string): {
query: Record<string, string>;
} {

View File

@@ -3,6 +3,7 @@ import test from 'node:test';
import {
buildStatsWindowLoadFileOptions,
buildStatsWindowOptions,
promoteStatsWindowLevel,
shouldHideStatsWindowForInput,
} from './stats-window-runtime';
@@ -149,3 +150,50 @@ test('buildStatsWindowLoadFileOptions includes provided stats API base URL', ()
},
});
});
// Verifies the full macOS promotion sequence and its exact order:
// always-on-top at screen-saver+2, visible on all workspaces (fullscreen
// included), fullscreenable disabled, then moveTop.
test('promoteStatsWindowLevel raises stats above overlay level on macOS', () => {
  const calls: string[] = [];
  promoteStatsWindowLevel(
    {
      setAlwaysOnTop: (flag: boolean, level?: string, relativeLevel?: number) => {
        calls.push(`always-on-top:${flag}:${level ?? 'none'}:${relativeLevel ?? 0}`);
      },
      setVisibleOnAllWorkspaces: (visible: boolean, options?: { visibleOnFullScreen?: boolean }) => {
        calls.push(
          `all-workspaces:${visible}:${options?.visibleOnFullScreen === true ? 'fullscreen' : 'plain'}`,
        );
      },
      setFullScreenable: (fullscreenable: boolean) => {
        calls.push(`fullscreenable:${fullscreenable}`);
      },
      moveTop: () => {
        calls.push('move-top');
      },
    } as never,
    'darwin',
  );
  assert.deepEqual(calls, [
    'always-on-top:true:screen-saver:2',
    'all-workspaces:true:fullscreen',
    'fullscreenable:false',
    'move-top',
  ]);
});
// Windows uses the same screen-saver+2 level but skips the macOS-only
// workspace/fullscreen calls (the stub deliberately omits those optional methods).
test('promoteStatsWindowLevel raises stats above overlay level on Windows', () => {
  const calls: string[] = [];
  promoteStatsWindowLevel(
    {
      setAlwaysOnTop: (flag: boolean, level?: string, relativeLevel?: number) => {
        calls.push(`always-on-top:${flag}:${level ?? 'none'}:${relativeLevel ?? 0}`);
      },
      moveTop: () => {
        calls.push('move-top');
      },
    } as never,
    'win32',
  );
  assert.deepEqual(calls, ['always-on-top:true:screen-saver:2', 'move-top']);
});

View File

@@ -5,6 +5,7 @@ import { IPC_CHANNELS } from '../../shared/ipc/contracts.js';
import {
buildStatsWindowLoadFileOptions,
buildStatsWindowOptions,
promoteStatsWindowLevel,
shouldHideStatsWindowForInput,
} from './stats-window-runtime.js';
@@ -22,6 +23,8 @@ export interface StatsWindowOptions {
getToggleKey: () => string;
/** Resolve the tracked overlay/mpv bounds */
resolveBounds: () => WindowGeometry | null;
/** Notify the main process when the stats overlay becomes visible/hidden */
onVisibilityChanged?: (visible: boolean) => void;
}
function syncStatsWindowBounds(window: BrowserWindow, bounds: WindowGeometry | null): void {
@@ -34,6 +37,15 @@ function syncStatsWindowBounds(window: BrowserWindow, bounds: WindowGeometry | n
});
}
// Show the stats overlay: align it with the tracked player bounds, promote it
// above the overlay layer, make it visible/focused, and notify listeners.
function showStatsWindow(window: BrowserWindow, options: StatsWindowOptions): void {
  syncStatsWindowBounds(window, options.resolveBounds());
  promoteStatsWindowLevel(window);
  window.show();
  window.focus();
  options.onVisibilityChanged?.(true);
  // NOTE(review): promoteStatsWindowLevel is invoked a second time after
  // show/focus — presumably to re-assert z-order once the window is actually
  // visible; confirm this is intentional and not a leftover duplicate.
  promoteStatsWindowLevel(window);
}
/**
* Toggle the stats overlay window: create on first call, then show/hide.
* The React app stays mounted across toggles — state is preserved.
@@ -51,6 +63,7 @@ export function toggleStatsOverlay(options: StatsWindowOptions): void {
statsWindow.loadFile(indexPath, buildStatsWindowLoadFileOptions(options.getApiBaseUrl?.()));
statsWindow.on('closed', () => {
options.onVisibilityChanged?.(false);
statsWindow = null;
});
@@ -58,21 +71,26 @@ export function toggleStatsOverlay(options: StatsWindowOptions): void {
if (shouldHideStatsWindowForInput(input, options.getToggleKey())) {
event.preventDefault();
statsWindow?.hide();
options.onVisibilityChanged?.(false);
}
});
statsWindow.once('ready-to-show', () => {
if (statsWindow) {
syncStatsWindowBounds(statsWindow, options.resolveBounds());
if (!statsWindow) return;
showStatsWindow(statsWindow, options);
});
statsWindow.on('blur', () => {
if (!statsWindow || statsWindow.isDestroyed() || !statsWindow.isVisible()) {
return;
}
statsWindow?.show();
promoteStatsWindowLevel(statsWindow);
});
} else if (statsWindow.isVisible()) {
statsWindow.hide();
options.onVisibilityChanged?.(false);
} else {
syncStatsWindowBounds(statsWindow, options.resolveBounds());
statsWindow.show();
statsWindow.focus();
showStatsWindow(statsWindow, options);
}
}

View File

@@ -108,8 +108,9 @@ test('serializeSubtitleMarkup preserves tooltip attrs and name-match precedence'
partOfSpeech: PartOfSpeech.other,
isMerged: false,
isKnown: false,
isNPlusOneTarget: false,
isNPlusOneTarget: true,
isNameMatch: true,
jlptLevel: 'N5',
frequencyRank: 12,
},
],
@@ -122,9 +123,11 @@ test('serializeSubtitleMarkup preserves tooltip attrs and name-match precedence'
);
assert.match(
markup,
/<span class="word word-name-match" data-reading="あれくしあ" data-headword="アレクシア" data-frequency-rank="12">アレクシア<\/span>/,
/<span class="word word-name-match" data-reading="あれくしあ" data-headword="アレクシア">アレクシア<\/span>/,
);
assert.doesNotMatch(markup, /word-name-match word-known|word-known word-name-match/);
assert.doesNotMatch(markup, /word-name-match word-n-plus-one|word-n-plus-one word-name-match/);
assert.doesNotMatch(markup, /data-frequency-rank="12"|data-jlpt-level="N5"|word-jlpt-n5/);
});
test('serializeSubtitleWebsocketMessage emits sentence payload', () => {

View File

@@ -47,10 +47,15 @@ function escapeHtml(text: string): string {
.replaceAll("'", '&#39;');
}
// Name matches take precedence over frequency/JLPT/N+1 styling in the
// serialized subtitle markup; centralized so every call site agrees on the check.
function hasPrioritizedNameMatch(token: MergedToken): boolean {
  return token.isNameMatch === true;
}
function computeFrequencyClass(
token: MergedToken,
options: SubtitleWebsocketFrequencyOptions,
): string | null {
if (hasPrioritizedNameMatch(token)) return null;
if (!options.enabled) return null;
if (typeof token.frequencyRank !== 'number' || !Number.isFinite(token.frequencyRank)) return null;
@@ -70,6 +75,7 @@ function getFrequencyRankLabel(
token: MergedToken,
options: SubtitleWebsocketFrequencyOptions,
): string | null {
if (hasPrioritizedNameMatch(token)) return null;
if (!options.enabled) return null;
if (typeof token.frequencyRank !== 'number' || !Number.isFinite(token.frequencyRank)) return null;
@@ -79,21 +85,25 @@ function getFrequencyRankLabel(
}
// JLPT tooltip label for a token; suppressed entirely for prioritized name
// matches, which hide all other annotation metadata.
function getJlptLevelLabel(token: MergedToken): string | null {
  return hasPrioritizedNameMatch(token) ? null : (token.jlptLevel ?? null);
}
function computeWordClass(token: MergedToken, options: SubtitleWebsocketFrequencyOptions): string {
const classes = ['word'];
if (token.isNPlusOneTarget) {
classes.push('word-n-plus-one');
} else if (token.isNameMatch) {
if (hasPrioritizedNameMatch(token)) {
classes.push('word-name-match');
} else if (token.isNPlusOneTarget) {
classes.push('word-n-plus-one');
} else if (token.isKnown) {
classes.push('word-known');
}
if (token.jlptLevel) {
if (!hasPrioritizedNameMatch(token) && token.jlptLevel) {
classes.push(`word-jlpt-${token.jlptLevel.toLowerCase()}`);
}
@@ -137,6 +147,8 @@ function serializeSubtitleToken(
token: MergedToken,
options: SubtitleWebsocketFrequencyOptions,
): SerializedSubtitleToken {
const prioritizedNameMatch = hasPrioritizedNameMatch(token);
return {
surface: token.surface,
reading: token.reading,
@@ -146,10 +158,10 @@ function serializeSubtitleToken(
partOfSpeech: token.partOfSpeech,
isMerged: token.isMerged,
isKnown: token.isKnown,
isNPlusOneTarget: token.isNPlusOneTarget,
isNPlusOneTarget: prioritizedNameMatch ? false : token.isNPlusOneTarget,
isNameMatch: token.isNameMatch ?? false,
jlptLevel: token.jlptLevel,
frequencyRank: token.frequencyRank,
jlptLevel: prioritizedNameMatch ? undefined : token.jlptLevel,
frequencyRank: prioritizedNameMatch ? undefined : token.frequencyRank,
className: computeWordClass(token, options),
frequencyRankLabel: getFrequencyRankLabel(token, options),
jlptLevelLabel: getJlptLevelLabel(token),

View File

@@ -150,6 +150,76 @@ test('annotateTokens handles JLPT disabled and eligibility exclusion paths', ()
assert.equal(excludedLookupCalls, 0);
});
// When name matching is enabled, a name-matched token must come out with no
// competing annotations: no N+1 flag, no frequency rank, no JLPT level — and
// the JLPT lookup must not even be invoked for it.
test('annotateTokens prioritizes name matches over n+1, frequency, and JLPT when enabled', () => {
  let jlptLookupCalls = 0;
  const tokens = [
    makeToken({
      surface: 'オリヴィア',
      reading: 'オリヴィア',
      headword: 'オリヴィア',
      isNameMatch: true,
      frequencyRank: 42,
      startPos: 0,
      endPos: 5,
    }),
  ];
  const result = annotateTokens(
    tokens,
    makeDeps({
      getJlptLevel: () => {
        jlptLookupCalls += 1;
        return 'N2';
      },
    }),
    {
      nameMatchEnabled: true,
      // Threshold of 1 so the single token would otherwise qualify for N+1.
      minSentenceWordsForNPlusOne: 1,
    },
  );
  assert.equal(result[0]?.isNameMatch, true);
  assert.equal(result[0]?.isNPlusOneTarget, false);
  assert.equal(result[0]?.frequencyRank, undefined);
  assert.equal(result[0]?.jlptLevel, undefined);
  assert.equal(jlptLookupCalls, 0);
});
// Counterpart to the priority test: with name highlighting disabled the token
// keeps its isNameMatch flag but all other annotations (N+1, frequency, JLPT)
// are computed normally, including the JLPT lookup call.
test('annotateTokens keeps other annotations for name matches when name highlighting is disabled', () => {
  let jlptLookupCalls = 0;
  const tokens = [
    makeToken({
      surface: 'オリヴィア',
      reading: 'オリヴィア',
      headword: 'オリヴィア',
      isNameMatch: true,
      frequencyRank: 42,
      startPos: 0,
      endPos: 5,
    }),
  ];
  const result = annotateTokens(
    tokens,
    makeDeps({
      getJlptLevel: () => {
        jlptLookupCalls += 1;
        return 'N2';
      },
    }),
    {
      nameMatchEnabled: false,
      minSentenceWordsForNPlusOne: 1,
    },
  );
  assert.equal(result[0]?.isNameMatch, true);
  assert.equal(result[0]?.isNPlusOneTarget, true);
  assert.equal(result[0]?.frequencyRank, 42);
  assert.equal(result[0]?.jlptLevel, 'N2');
  assert.equal(jlptLookupCalls, 1);
});
test('annotateTokens N+1 handoff marks expected target when threshold is satisfied', () => {
const tokens = [
makeToken({ surface: '私', headword: '私', startPos: 0, endPos: 1 }),

View File

@@ -39,6 +39,7 @@ export interface AnnotationStageDeps {
export interface AnnotationStageOptions {
nPlusOneEnabled?: boolean;
nameMatchEnabled?: boolean;
jlptEnabled?: boolean;
frequencyEnabled?: boolean;
minSentenceWordsForNPlusOne?: number;
@@ -611,6 +612,13 @@ function computeTokenJlptLevel(
return level ?? undefined;
}
// A token's name match is "prioritized" (suppressing other annotations)
// unless name matching has been explicitly disabled; an absent/undefined
// nameMatchEnabled counts as enabled.
function hasPrioritizedNameMatch(
  token: MergedToken,
  options: Pick<AnnotationStageOptions, 'nameMatchEnabled'>,
): boolean {
  if (options.nameMatchEnabled === false) {
    return false;
  }
  return token.isNameMatch === true;
}
export function annotateTokens(
tokens: MergedToken[],
deps: AnnotationStageDeps,
@@ -619,25 +627,31 @@ export function annotateTokens(
const pos1Exclusions = resolvePos1Exclusions(options);
const pos2Exclusions = resolvePos2Exclusions(options);
const nPlusOneEnabled = options.nPlusOneEnabled !== false;
const nameMatchEnabled = options.nameMatchEnabled !== false;
const frequencyEnabled = options.frequencyEnabled !== false;
const jlptEnabled = options.jlptEnabled !== false;
// Single pass: compute known word status, frequency filtering, and JLPT level together
const annotated = tokens.map((token) => {
const prioritizedNameMatch = nameMatchEnabled && token.isNameMatch === true;
const isKnown = nPlusOneEnabled
? computeTokenKnownStatus(token, deps.isKnownWord, deps.knownWordMatchMode)
: false;
const frequencyRank = frequencyEnabled
const frequencyRank = frequencyEnabled && !prioritizedNameMatch
? filterTokenFrequencyRank(token, pos1Exclusions, pos2Exclusions)
: undefined;
const jlptLevel = jlptEnabled ? computeTokenJlptLevel(token, deps.getJlptLevel) : undefined;
const jlptLevel =
jlptEnabled && !prioritizedNameMatch
? computeTokenJlptLevel(token, deps.getJlptLevel)
: undefined;
return {
...token,
isKnown,
isNPlusOneTarget: nPlusOneEnabled ? token.isNPlusOneTarget : false,
isNPlusOneTarget:
nPlusOneEnabled && !prioritizedNameMatch ? token.isNPlusOneTarget : false,
frequencyRank,
jlptLevel,
};
@@ -655,10 +669,25 @@ export function annotateTokens(
? minSentenceWordsForNPlusOne
: 3;
return markNPlusOneTargets(
const nPlusOneMarked = markNPlusOneTargets(
annotated,
sanitizedMinSentenceWordsForNPlusOne,
pos1Exclusions,
pos2Exclusions,
);
if (!nameMatchEnabled) {
return nPlusOneMarked;
}
return nPlusOneMarked.map((token) =>
hasPrioritizedNameMatch(token, options)
? {
...token,
isNPlusOneTarget: false,
frequencyRank: undefined,
jlptLevel: undefined,
}
: token,
);
}

View File

@@ -7,6 +7,7 @@ import { updateVisibleOverlayVisibility } from '../core/services';
export interface OverlayVisibilityRuntimeDeps {
getMainWindow: () => BrowserWindow | null;
getVisibleOverlayVisible: () => boolean;
getForceMousePassthrough: () => boolean;
getWindowTracker: () => BaseWindowTracker | null;
getTrackerNotReadyWarningShown: () => boolean;
setTrackerNotReadyWarningShown: (shown: boolean) => void;
@@ -32,6 +33,7 @@ export function createOverlayVisibilityRuntimeService(
updateVisibleOverlayVisibility(): void {
updateVisibleOverlayVisibility({
visibleOverlayVisible: deps.getVisibleOverlayVisible(),
forceMousePassthrough: deps.getForceMousePassthrough(),
mainWindow: deps.getMainWindow(),
windowTracker: deps.getWindowTracker(),
trackerNotReadyWarningShown: deps.getTrackerNotReadyWarningShown(),

View File

@@ -0,0 +1,55 @@
import assert from 'node:assert/strict';
import test from 'node:test';
import { handleCharacterDictionaryAutoSyncComplete } from './character-dictionary-auto-sync-completion';
// changed=false must skip the whole cache-invalidation/refresh chain — only
// the completion log line is emitted.
test('character dictionary sync completion skips expensive subtitle refresh when dictionary is unchanged', () => {
  const calls: string[] = [];
  handleCharacterDictionaryAutoSyncComplete(
    {
      mediaId: 1,
      mediaTitle: 'Frieren',
      changed: false,
    },
    {
      hasParserWindow: () => true,
      clearParserCaches: () => calls.push('clear-parser'),
      invalidateTokenizationCache: () => calls.push('invalidate'),
      refreshSubtitlePrefetch: () => calls.push('prefetch'),
      refreshCurrentSubtitle: () => calls.push('refresh-subtitle'),
      logInfo: (message) => calls.push(`log:${message}`),
    },
  );
  assert.deepEqual(calls, [
    'log:[dictionary:auto-sync] refreshed current subtitle after sync (AniList 1, changed=no, title=Frieren)',
  ]);
});
// changed=true runs the full refresh chain in order (parser caches are cleared
// because hasParserWindow reports true) before logging the completion.
test('character dictionary sync completion refreshes subtitle state when dictionary changed', () => {
  const calls: string[] = [];
  handleCharacterDictionaryAutoSyncComplete(
    {
      mediaId: 1,
      mediaTitle: 'Frieren',
      changed: true,
    },
    {
      hasParserWindow: () => true,
      clearParserCaches: () => calls.push('clear-parser'),
      invalidateTokenizationCache: () => calls.push('invalidate'),
      refreshSubtitlePrefetch: () => calls.push('prefetch'),
      refreshCurrentSubtitle: () => calls.push('refresh-subtitle'),
      logInfo: (message) => calls.push(`log:${message}`),
    },
  );
  assert.deepEqual(calls, [
    'clear-parser',
    'invalidate',
    'prefetch',
    'refresh-subtitle',
    'log:[dictionary:auto-sync] refreshed current subtitle after sync (AniList 1, changed=yes, title=Frieren)',
  ]);
});

View File

@@ -0,0 +1,27 @@
/**
 * Post-processing hook for a completed character-dictionary auto-sync.
 *
 * When the merged dictionary actually changed, cached tokenization state is
 * flushed (parser caches only when a parser window exists) and the subtitle
 * pipeline is refreshed. The completion is logged in every case.
 */
export function handleCharacterDictionaryAutoSyncComplete(
  completion: {
    mediaId: number;
    mediaTitle: string;
    changed: boolean;
  },
  deps: {
    hasParserWindow: () => boolean;
    clearParserCaches: () => void;
    invalidateTokenizationCache: () => void;
    refreshSubtitlePrefetch: () => void;
    refreshCurrentSubtitle: () => void;
    logInfo: (message: string) => void;
  },
): void {
  const { mediaId, mediaTitle, changed } = completion;
  if (changed) {
    if (deps.hasParserWindow()) {
      deps.clearParserCaches();
    }
    deps.invalidateTokenizationCache();
    deps.refreshSubtitlePrefetch();
    deps.refreshCurrentSubtitle();
  }
  const changedLabel = changed ? 'yes' : 'no';
  deps.logInfo(
    `[dictionary:auto-sync] refreshed current subtitle after sync (AniList ${mediaId}, changed=${changedLabel}, title=${mediaTitle})`,
  );
}

View File

@@ -83,16 +83,16 @@ test('auto sync imports merged dictionary and persists MRU state', async () => {
const statePath = path.join(userDataPath, 'character-dictionaries', 'auto-sync-state.json');
const state = JSON.parse(fs.readFileSync(statePath, 'utf8')) as {
activeMediaIds: number[];
activeMediaIds: string[];
mergedRevision: string | null;
mergedDictionaryTitle: string | null;
};
assert.deepEqual(state.activeMediaIds, [130298]);
assert.deepEqual(state.activeMediaIds, ['130298 - The Eminence in Shadow']);
assert.equal(state.mergedRevision, 'rev-1');
assert.equal(state.mergedDictionaryTitle, 'SubMiner Character Dictionary');
assert.deepEqual(logs, [
'[dictionary:auto-sync] syncing current anime snapshot',
'[dictionary:auto-sync] active AniList media set: 130298',
'[dictionary:auto-sync] active AniList media set: 130298 - The Eminence in Shadow',
'[dictionary:auto-sync] rebuilding merged dictionary for active anime set',
'[dictionary:auto-sync] importing merged dictionary: /tmp/subminer-character-dictionary.zip',
'[dictionary:auto-sync] applying Yomitan settings for SubMiner Character Dictionary',
@@ -212,9 +212,9 @@ test('auto sync updates MRU order without rebuilding merged dictionary when memb
const statePath = path.join(userDataPath, 'character-dictionaries', 'auto-sync-state.json');
const state = JSON.parse(fs.readFileSync(statePath, 'utf8')) as {
activeMediaIds: number[];
activeMediaIds: string[];
};
assert.deepEqual(state.activeMediaIds, [1, 2]);
assert.deepEqual(state.activeMediaIds, ['1 - Title 1', '2 - Title 2']);
});
test('auto sync evicts least recently used media from merged set', async () => {
@@ -277,9 +277,9 @@ test('auto sync evicts least recently used media from merged set', async () => {
const statePath = path.join(userDataPath, 'character-dictionaries', 'auto-sync-state.json');
const state = JSON.parse(fs.readFileSync(statePath, 'utf8')) as {
activeMediaIds: number[];
activeMediaIds: string[];
};
assert.deepEqual(state.activeMediaIds, [4, 3, 2]);
assert.deepEqual(state.activeMediaIds, ['4 - Title 4', '3 - Title 3', '2 - Title 2']);
});
test('auto sync keeps revisited media retained when a new title is added afterward', async () => {
@@ -344,9 +344,9 @@ test('auto sync keeps revisited media retained when a new title is added afterwa
const statePath = path.join(userDataPath, 'character-dictionaries', 'auto-sync-state.json');
const state = JSON.parse(fs.readFileSync(statePath, 'utf8')) as {
activeMediaIds: number[];
activeMediaIds: string[];
};
assert.deepEqual(state.activeMediaIds, [1, 4, 3]);
assert.deepEqual(state.activeMediaIds, ['1 - Title 1', '4 - Title 4', '3 - Title 3']);
});
test('auto sync persists rebuilt MRU state even if Yomitan import fails afterward', async () => {
@@ -404,11 +404,11 @@ test('auto sync persists rebuilt MRU state even if Yomitan import fails afterwar
const state = JSON.parse(
fs.readFileSync(path.join(dictionariesDir, 'auto-sync-state.json'), 'utf8'),
) as {
activeMediaIds: number[];
activeMediaIds: string[];
mergedRevision: string | null;
mergedDictionaryTitle: string | null;
};
assert.deepEqual(state.activeMediaIds, [1, 2, 3]);
assert.deepEqual(state.activeMediaIds, ['1 - Title 1', '2', '3']);
assert.equal(state.mergedRevision, 'rev-1-2-3');
assert.equal(state.mergedDictionaryTitle, 'SubMiner Character Dictionary');
});

View File

@@ -7,8 +7,13 @@ import type {
MergedCharacterDictionaryBuildResult,
} from '../character-dictionary-runtime';
type AutoSyncMediaEntry = {
mediaId: number;
label: string;
};
type AutoSyncState = {
activeMediaIds: number[];
activeMediaIds: AutoSyncMediaEntry[];
mergedRevision: string | null;
mergedDictionaryTitle: string | null;
};
@@ -64,16 +69,66 @@ function ensureDir(dirPath: string): void {
}
}
/**
 * Normalize a raw AniList media id to a positive integer, or null if invalid.
 *
 * The previous implementation clamped values with `Math.max(1, …)`, which
 * silently remapped 0 or negative ids to media id 1 — corrupting the entry
 * into a *different* id — and made the callers' `<= 0` guards unreachable.
 * Non-finite values (NaN/Infinity) and non-positive values now both reject.
 */
function normalizeMediaId(rawMediaId: number): number | null {
  if (!Number.isFinite(rawMediaId)) {
    return null;
  }
  const mediaId = Math.floor(rawMediaId);
  return mediaId >= 1 ? mediaId : null;
}
/**
 * Parse one persisted active-media MRU entry.
 *
 * Supports the legacy numeric format (bare media id) and the current
 * "<id> - <title>" string format. Returns null for anything unparsable so
 * corrupt entries are dropped rather than guessed at.
 *
 * Cleanups vs. the previous version: the `rawId ?? ''` fallback was redundant
 * (rawId is truthiness-checked just above), and the `mediaId <= 0` guard was
 * dead code (normalizeMediaId never returns a non-positive number).
 */
function parseActiveMediaEntry(rawEntry: unknown): AutoSyncMediaEntry | null {
  // Legacy state files stored bare numeric ids.
  if (typeof rawEntry === 'number') {
    const mediaId = normalizeMediaId(rawEntry);
    if (mediaId === null) {
      return null;
    }
    return { mediaId, label: String(mediaId) };
  }
  if (typeof rawEntry !== 'string') {
    return null;
  }
  const trimmed = rawEntry.trim();
  if (!trimmed) {
    return null;
  }
  // Label format is "<id> - <title>"; titles may themselves contain " - ",
  // so everything after the first separator is rejoined below.
  const [rawId, ...rawTitleParts] = trimmed.split(' - ');
  if (!rawId || !/^\d+$/.test(rawId)) {
    return null;
  }
  // rawId is guaranteed non-empty digits here, so parseInt cannot yield NaN.
  const mediaId = normalizeMediaId(Number.parseInt(rawId, 10));
  if (mediaId === null) {
    return null;
  }
  const rawLabel = rawTitleParts.length > 0 ? rawTitleParts.join(' - ').trim() : '';
  return { mediaId, label: rawLabel ? `${mediaId} - ${rawLabel}` : String(mediaId) };
}
function buildActiveMediaLabel(mediaId: number, mediaTitle: string | null | undefined): string {
const normalizedId = normalizeMediaId(mediaId);
const trimmedTitle = typeof mediaTitle === 'string' ? mediaTitle.trim() : '';
if (normalizedId === null) {
return trimmedTitle;
}
return trimmedTitle.length > 0 ? `${normalizedId} - ${trimmedTitle}` : String(normalizedId);
}
function readAutoSyncState(statePath: string): AutoSyncState {
try {
const raw = fs.readFileSync(statePath, 'utf8');
const parsed = JSON.parse(raw) as Partial<AutoSyncState>;
const activeMediaIds = Array.isArray(parsed.activeMediaIds)
? parsed.activeMediaIds
.filter((value): value is number => typeof value === 'number' && Number.isFinite(value))
.map((value) => Math.max(1, Math.floor(value)))
.filter((value, index, all) => all.indexOf(value) === index)
: [];
const activeMediaIds: AutoSyncMediaEntry[] = [];
const activeMediaIdSet = new Set<number>();
if (Array.isArray(parsed.activeMediaIds)) {
for (const value of parsed.activeMediaIds) {
const entry = parseActiveMediaEntry(value);
if (entry && !activeMediaIdSet.has(entry.mediaId)) {
activeMediaIdSet.add(entry.mediaId);
activeMediaIds.push(entry);
}
}
}
return {
activeMediaIds,
mergedRevision:
@@ -96,7 +151,12 @@ function readAutoSyncState(statePath: string): AutoSyncState {
function writeAutoSyncState(statePath: string, state: AutoSyncState): void {
ensureDir(path.dirname(statePath));
fs.writeFileSync(statePath, JSON.stringify(state, null, 2), 'utf8');
const persistedState = {
activeMediaIds: state.activeMediaIds.map((entry) => entry.label),
mergedRevision: state.mergedRevision,
mergedDictionaryTitle: state.mergedDictionaryTitle,
};
fs.writeFileSync(statePath, JSON.stringify(persistedState, null, 2), 'utf8');
}
function arraysEqual(left: number[], right: number[]): boolean {
@@ -223,15 +283,22 @@ export function createCharacterDictionaryAutoSyncRuntimeService(
});
const state = readAutoSyncState(statePath);
const nextActiveMediaIds = [
snapshot.mediaId,
...state.activeMediaIds.filter((mediaId) => mediaId !== snapshot.mediaId),
{
mediaId: snapshot.mediaId,
label: buildActiveMediaLabel(snapshot.mediaId, snapshot.mediaTitle),
},
...state.activeMediaIds.filter((entry) => entry.mediaId !== snapshot.mediaId),
].slice(0, Math.max(1, Math.floor(config.maxLoaded)));
const nextActiveMediaIdValues = nextActiveMediaIds.map((entry) => entry.mediaId);
deps.logInfo?.(
`[dictionary:auto-sync] active AniList media set: ${nextActiveMediaIds.join(', ')}`,
`[dictionary:auto-sync] active AniList media set: ${nextActiveMediaIds
.map((entry) => entry.label)
.join(', ')}`,
);
const retainedOrderChanged = !arraysEqual(nextActiveMediaIds, state.activeMediaIds);
const retainedMembershipChanged = !sameMembership(nextActiveMediaIds, state.activeMediaIds);
const stateMediaIds = state.activeMediaIds.map((entry) => entry.mediaId);
const retainedOrderChanged = !arraysEqual(nextActiveMediaIdValues, stateMediaIds);
const retainedMembershipChanged = !sameMembership(nextActiveMediaIdValues, stateMediaIds);
let merged: MergedCharacterDictionaryBuildResult | null = null;
if (
retainedMembershipChanged ||
@@ -244,9 +311,9 @@ export function createCharacterDictionaryAutoSyncRuntimeService(
mediaId: snapshot.mediaId,
mediaTitle: snapshot.mediaTitle,
message: buildBuildingMessage(snapshot.mediaTitle),
});
});
deps.logInfo?.('[dictionary:auto-sync] rebuilding merged dictionary for active anime set');
merged = await deps.buildMergedDictionary(nextActiveMediaIds);
merged = await deps.buildMergedDictionary(nextActiveMediaIdValues);
}
const dictionaryTitle = merged?.dictionaryTitle ?? state.mergedDictionaryTitle;
@@ -293,7 +360,7 @@ export function createCharacterDictionaryAutoSyncRuntimeService(
);
}
if (merged === null) {
merged = await deps.buildMergedDictionary(nextActiveMediaIds);
merged = await deps.buildMergedDictionary(nextActiveMediaIdValues);
}
deps.logInfo?.(`[dictionary:auto-sync] importing merged dictionary: ${merged.zipPath}`);
const imported = await withOperationTimeout(

View File

@@ -40,3 +40,19 @@ test('current media tokenization gate returns immediately for ready media', asyn
await gate.waitUntilReady('/tmp/video-1.mkv');
});
test('current media tokenization gate stays ready for later media after first warmup', async () => {
const gate = createCurrentMediaTokenizationGate();
gate.updateCurrentMediaPath('/tmp/video-1.mkv');
gate.markReady('/tmp/video-1.mkv');
gate.updateCurrentMediaPath('/tmp/video-2.mkv');
let resolved = false;
const waitPromise = gate.waitUntilReady('/tmp/video-2.mkv').then(() => {
resolved = true;
});
await Promise.resolve();
assert.equal(resolved, true);
await waitPromise;
});

View File

@@ -13,6 +13,7 @@ export function createCurrentMediaTokenizationGate(): {
} {
let currentMediaPath: string | null = null;
let readyMediaPath: string | null = null;
let warmupCompleted = false;
let pendingMediaPath: string | null = null;
let pendingPromise: Promise<void> | null = null;
let resolvePending: (() => void) | null = null;
@@ -43,6 +44,11 @@ export function createCurrentMediaTokenizationGate(): {
return;
}
currentMediaPath = normalizedPath;
if (warmupCompleted) {
readyMediaPath = normalizedPath;
resolvePendingWaiter();
return;
}
readyMediaPath = null;
resolvePendingWaiter();
if (normalizedPath) {
@@ -54,6 +60,7 @@ export function createCurrentMediaTokenizationGate(): {
if (!normalizedPath) {
return;
}
warmupCompleted = true;
readyMediaPath = normalizedPath;
if (pendingMediaPath === normalizedPath) {
resolvePendingWaiter();
@@ -61,7 +68,7 @@ export function createCurrentMediaTokenizationGate(): {
},
waitUntilReady: async (mediaPath) => {
const normalizedPath = normalizeMediaPath(mediaPath) ?? currentMediaPath;
if (!normalizedPath || readyMediaPath === normalizedPath) {
if (warmupCompleted || !normalizedPath || readyMediaPath === normalizedPath) {
return;
}
await ensurePendingPromise(normalizedPath);

View File

@@ -14,6 +14,7 @@ function makeConfig() {
retention: {
eventsDays: 14,
telemetryDays: 30,
sessionsDays: 45,
dailyRollupsDays: 180,
monthlyRollupsDays: 730,
vacuumIntervalDays: 7,
@@ -97,6 +98,7 @@ test('createImmersionTrackerStartupHandler creates tracker and auto-connects mpv
retention: {
eventsDays: 14,
telemetryDays: 30,
sessionsDays: 45,
dailyRollupsDays: 180,
monthlyRollupsDays: 730,
vacuumIntervalDays: 7,

View File

@@ -1,6 +1,7 @@
type ImmersionRetentionPolicy = {
eventsDays: number;
telemetryDays: number;
sessionsDays: number;
dailyRollupsDays: number;
monthlyRollupsDays: number;
vacuumIntervalDays: number;
@@ -77,6 +78,7 @@ export function createImmersionTrackerStartupHandler(
retention: {
eventsDays: policy.retention.eventsDays,
telemetryDays: policy.retention.telemetryDays,
sessionsDays: policy.retention.sessionsDays,
dailyRollupsDays: policy.retention.dailyRollupsDays,
monthlyRollupsDays: policy.retention.monthlyRollupsDays,
vacuumIntervalDays: policy.retention.vacuumIntervalDays,

View File

@@ -29,10 +29,13 @@ test('mpv connection handler reports stop and quits when disconnect guard passes
test('mpv connection handler syncs overlay subtitle suppression on connect', () => {
const calls: string[] = [];
const handler = createHandleMpvConnectionChangeHandler({
const deps: Parameters<typeof createHandleMpvConnectionChangeHandler>[0] & {
scheduleCharacterDictionarySync: () => void;
} = {
reportJellyfinRemoteStopped: () => calls.push('report-stop'),
refreshDiscordPresence: () => calls.push('presence-refresh'),
syncOverlayMpvSubtitleSuppression: () => calls.push('sync-overlay-mpv-sub'),
scheduleCharacterDictionarySync: () => calls.push('dict-sync'),
hasInitialJellyfinPlayArg: () => true,
isOverlayRuntimeInitialized: () => false,
isQuitOnDisconnectArmed: () => true,
@@ -41,7 +44,8 @@ test('mpv connection handler syncs overlay subtitle suppression on connect', ()
},
isMpvConnected: () => false,
quitApp: () => calls.push('quit'),
});
};
const handler = createHandleMpvConnectionChangeHandler(deps);
handler({ connected: true });

View File

@@ -22,7 +22,6 @@ export function createHandleMpvConnectionChangeHandler(deps: {
reportJellyfinRemoteStopped: () => void;
refreshDiscordPresence: () => void;
syncOverlayMpvSubtitleSuppression: () => void;
scheduleCharacterDictionarySync?: () => void;
hasInitialJellyfinPlayArg: () => boolean;
isOverlayRuntimeInitialized: () => boolean;
isQuitOnDisconnectArmed: () => boolean;
@@ -34,7 +33,6 @@ export function createHandleMpvConnectionChangeHandler(deps: {
deps.refreshDiscordPresence();
if (connected) {
deps.syncOverlayMpvSubtitleSuppression();
deps.scheduleCharacterDictionarySync?.();
return;
}
deps.reportJellyfinRemoteStopped();

View File

@@ -103,16 +103,19 @@ test('media path change handler signals autoplay-ready fast path for warm non-em
]);
});
test('media title change handler clears guess state and syncs immersion', () => {
test('media title change handler clears guess state without re-scheduling character dictionary sync', () => {
const calls: string[] = [];
const handler = createHandleMpvMediaTitleChangeHandler({
const deps: Parameters<typeof createHandleMpvMediaTitleChangeHandler>[0] & {
scheduleCharacterDictionarySync: () => void;
} = {
updateCurrentMediaTitle: (title) => calls.push(`title:${title}`),
resetAnilistMediaGuessState: () => calls.push('reset-guess'),
notifyImmersionTitleUpdate: (title) => calls.push(`notify:${title}`),
syncImmersionMediaState: () => calls.push('sync'),
scheduleCharacterDictionarySync: () => calls.push('dict-sync'),
refreshDiscordPresence: () => calls.push('presence'),
});
};
const handler = createHandleMpvMediaTitleChangeHandler(deps);
handler({ title: 'Episode 1' });
assert.deepEqual(calls, [
@@ -120,7 +123,6 @@ test('media title change handler clears guess state and syncs immersion', () =>
'reset-guess',
'notify:Episode 1',
'sync',
'dict-sync',
'presence',
]);
});

View File

@@ -70,7 +70,6 @@ export function createHandleMpvMediaTitleChangeHandler(deps: {
resetAnilistMediaGuessState: () => void;
notifyImmersionTitleUpdate: (title: string) => void;
syncImmersionMediaState: () => void;
scheduleCharacterDictionarySync?: () => void;
refreshDiscordPresence: () => void;
}) {
return ({ title }: { title: string | null }): void => {
@@ -79,9 +78,6 @@ export function createHandleMpvMediaTitleChangeHandler(deps: {
deps.resetAnilistMediaGuessState();
deps.notifyImmersionTitleUpdate(normalizedTitle);
deps.syncImmersionMediaState();
if (normalizedTitle.trim().length > 0) {
deps.scheduleCharacterDictionarySync?.();
}
deps.refreshDiscordPresence();
};
}

View File

@@ -72,7 +72,6 @@ export function createBindMpvMainEventHandlersHandler(deps: {
reportJellyfinRemoteStopped: () => deps.reportJellyfinRemoteStopped(),
refreshDiscordPresence: () => deps.refreshDiscordPresence(),
syncOverlayMpvSubtitleSuppression: () => deps.syncOverlayMpvSubtitleSuppression(),
scheduleCharacterDictionarySync: () => deps.scheduleCharacterDictionarySync?.(),
hasInitialJellyfinPlayArg: () => deps.hasInitialJellyfinPlayArg(),
isOverlayRuntimeInitialized: () => deps.isOverlayRuntimeInitialized(),
isQuitOnDisconnectArmed: () => deps.isQuitOnDisconnectArmed(),
@@ -119,7 +118,6 @@ export function createBindMpvMainEventHandlersHandler(deps: {
resetAnilistMediaGuessState: () => deps.resetAnilistMediaGuessState(),
notifyImmersionTitleUpdate: (title) => deps.notifyImmersionTitleUpdate(title),
syncImmersionMediaState: () => deps.syncImmersionMediaState(),
scheduleCharacterDictionarySync: () => deps.scheduleCharacterDictionarySync?.(),
refreshDiscordPresence: () => deps.refreshDiscordPresence(),
});
const handleMpvTimePosChange = createHandleMpvTimePosChangeHandler({

View File

@@ -13,6 +13,7 @@ test('overlay visibility runtime main deps builder maps state and geometry callb
const deps = createBuildOverlayVisibilityRuntimeMainDepsHandler({
getMainWindow: () => mainWindow,
getVisibleOverlayVisible: () => true,
getForceMousePassthrough: () => true,
getWindowTracker: () => tracker,
getTrackerNotReadyWarningShown: () => trackerNotReadyWarningShown,
setTrackerNotReadyWarningShown: (shown) => {
@@ -32,6 +33,7 @@ test('overlay visibility runtime main deps builder maps state and geometry callb
assert.equal(deps.getMainWindow(), mainWindow);
assert.equal(deps.getVisibleOverlayVisible(), true);
assert.equal(deps.getForceMousePassthrough(), true);
assert.equal(deps.getTrackerNotReadyWarningShown(), false);
deps.setTrackerNotReadyWarningShown(true);
deps.updateVisibleOverlayBounds({ x: 0, y: 0, width: 10, height: 10 });

View File

@@ -8,6 +8,7 @@ export function createBuildOverlayVisibilityRuntimeMainDepsHandler(
return (): OverlayVisibilityRuntimeDeps => ({
getMainWindow: () => deps.getMainWindow(),
getVisibleOverlayVisible: () => deps.getVisibleOverlayVisible(),
getForceMousePassthrough: () => deps.getForceMousePassthrough(),
getWindowTracker: () => deps.getWindowTracker(),
getTrackerNotReadyWarningShown: () => deps.getTrackerNotReadyWarningShown(),
setTrackerNotReadyWarningShown: (shown: boolean) => deps.setTrackerNotReadyWarningShown(shown),

View File

@@ -138,7 +138,7 @@ test('startup OSD shows dictionary failure after annotation loading completes',
]);
});
test('startup OSD reset requires the next media to wait for tokenization again', () => {
test('startup OSD reset keeps tokenization ready after first warmup', () => {
const osdMessages: string[] = [];
const sequencer = createStartupOsdSequencer({
showOsd: (message) => {
@@ -152,8 +152,5 @@ test('startup OSD reset requires the next media to wait for tokenization again',
makeDictionaryEvent('syncing', 'Updating character dictionary for Frieren...'),
);
assert.deepEqual(osdMessages, []);
sequencer.markTokenizationReady();
assert.deepEqual(osdMessages, ['Updating character dictionary for Frieren...']);
});

View File

@@ -11,6 +11,7 @@ export function createStartupOsdSequencer(deps: { showOsd: (message: string) =>
notifyCharacterDictionaryStatus: (event: StartupOsdSequencerCharacterDictionaryEvent) => void;
} {
let tokenizationReady = false;
let tokenizationWarmupCompleted = false;
let annotationLoadingMessage: string | null = null;
let pendingDictionaryProgress: StartupOsdSequencerCharacterDictionaryEvent | null = null;
let pendingDictionaryFailure: StartupOsdSequencerCharacterDictionaryEvent | null = null;
@@ -39,13 +40,14 @@ export function createStartupOsdSequencer(deps: { showOsd: (message: string) =>
return {
reset: () => {
tokenizationReady = false;
tokenizationReady = tokenizationWarmupCompleted;
annotationLoadingMessage = null;
pendingDictionaryProgress = null;
pendingDictionaryFailure = null;
dictionaryProgressShown = false;
},
markTokenizationReady: () => {
tokenizationWarmupCompleted = true;
tokenizationReady = true;
if (annotationLoadingMessage !== null) {
deps.showOsd(annotationLoadingMessage);

View File

@@ -1,5 +1,8 @@
import test from 'node:test';
import assert from 'node:assert/strict';
import fs from 'node:fs';
import os from 'node:os';
import path from 'node:path';
import { createRunStatsCliCommandHandler } from './stats-cli-command';
function makeHandler(
@@ -114,3 +117,245 @@ test('stats cli command runs vocab cleanup instead of opening dashboard when cle
},
]);
});
test('stats cli command runs lifetime rebuild when cleanup lifetime mode is requested', async () => {
const { handler, calls, responses } = makeHandler({
ensureVocabularyCleanupTokenizerReady: async () => {
calls.push('ensureVocabularyCleanupTokenizerReady');
},
getImmersionTracker: () => ({
rebuildLifetimeSummaries: async () => ({
appliedSessions: 4,
rebuiltAtMs: 1_710_000_000_000,
}),
}),
});
await handler(
{
statsResponsePath: '/tmp/subminer-stats-response.json',
statsCleanup: true,
statsCleanupLifetime: true,
},
'initial',
);
assert.deepEqual(calls, [
'ensureImmersionTrackerStarted',
'info:Stats lifetime rebuild complete: appliedSessions=4 rebuiltAtMs=1710000000000',
]);
assert.deepEqual(responses, [
{
responsePath: '/tmp/subminer-stats-response.json',
payload: { ok: true },
},
]);
});
function makeDbPath(): string {
const dir = fs.mkdtempSync(path.join(os.tmpdir(), 'subminer-stats-runtime-test-'));
return path.join(dir, 'immersion.sqlite');
}
function cleanupDbPath(dbPath: string): void {
fs.rmSync(path.dirname(dbPath), { recursive: true, force: true });
}
async function waitForPendingAnimeMetadata(
tracker: import('../../core/services/immersion-tracker-service').ImmersionTrackerService,
): Promise<void> {
const privateApi = tracker as unknown as {
sessionState: { videoId: number } | null;
pendingAnimeMetadataUpdates?: Map<number, Promise<void>>;
};
const videoId = privateApi.sessionState?.videoId;
if (!videoId) return;
await privateApi.pendingAnimeMetadataUpdates?.get(videoId);
}
test('tracker rebuildLifetimeSummaries backfills retained sessions and is idempotent', async () => {
const dbPath = makeDbPath();
let tracker:
| import('../../core/services/immersion-tracker-service').ImmersionTrackerService
| null = null;
let tracker2:
| import('../../core/services/immersion-tracker-service').ImmersionTrackerService
| null = null;
let tracker3:
| import('../../core/services/immersion-tracker-service').ImmersionTrackerService
| null = null;
const { ImmersionTrackerService } = await import('../../core/services/immersion-tracker-service');
const { Database } = await import('../../core/services/immersion-tracker/sqlite');
try {
tracker = new ImmersionTrackerService({ dbPath });
tracker.handleMediaChange('/tmp/Frieren S01E01.mkv', 'Episode 1');
await waitForPendingAnimeMetadata(tracker);
tracker.recordCardsMined(2);
tracker.recordSubtitleLine('first line', 0, 1);
tracker.destroy();
tracker = null;
tracker2 = new ImmersionTrackerService({ dbPath });
tracker2.handleMediaChange('/tmp/Frieren S01E02.mkv', 'Episode 2');
await waitForPendingAnimeMetadata(tracker2);
tracker2.recordCardsMined(1);
tracker2.recordSubtitleLine('second line', 0, 1);
tracker2.destroy();
tracker2 = null;
const beforeDb = new Database(dbPath);
const expectedGlobal = beforeDb
.prepare(
`
SELECT total_sessions, total_cards, episodes_started, active_days
FROM imm_lifetime_global
`,
)
.get() as {
total_sessions: number;
total_cards: number;
episodes_started: number;
active_days: number;
} | null;
const expectedAnimeRows = (
beforeDb.prepare('SELECT COUNT(*) AS total FROM imm_lifetime_anime').get() as {
total: number;
}
).total;
const expectedMediaRows = (
beforeDb.prepare('SELECT COUNT(*) AS total FROM imm_lifetime_media').get() as {
total: number;
}
).total;
const expectedAppliedSessions = (
beforeDb.prepare('SELECT COUNT(*) AS total FROM imm_lifetime_applied_sessions').get() as {
total: number;
}
).total;
beforeDb.exec(`
DELETE FROM imm_lifetime_anime;
DELETE FROM imm_lifetime_media;
DELETE FROM imm_lifetime_applied_sessions;
UPDATE imm_lifetime_global
SET total_sessions = 999,
total_cards = 999,
episodes_started = 999,
active_days = 999
WHERE global_id = 1;
`);
beforeDb.close();
tracker3 = new ImmersionTrackerService({ dbPath });
const firstRebuild = await tracker3.rebuildLifetimeSummaries();
const secondRebuild = await tracker3.rebuildLifetimeSummaries();
const rebuiltDb = new Database(dbPath);
const rebuiltGlobal = rebuiltDb
.prepare(
`
SELECT total_sessions, total_cards, episodes_started, active_days
FROM imm_lifetime_global
`,
)
.get() as {
total_sessions: number;
total_cards: number;
episodes_started: number;
active_days: number;
} | null;
const rebuiltAnimeRows = (
rebuiltDb.prepare('SELECT COUNT(*) AS total FROM imm_lifetime_anime').get() as {
total: number;
}
).total;
const rebuiltMediaRows = (
rebuiltDb.prepare('SELECT COUNT(*) AS total FROM imm_lifetime_media').get() as {
total: number;
}
).total;
const rebuiltAppliedSessions = (
rebuiltDb.prepare('SELECT COUNT(*) AS total FROM imm_lifetime_applied_sessions').get() as {
total: number;
}
).total;
rebuiltDb.close();
assert.ok(rebuiltGlobal);
assert.ok(expectedGlobal);
assert.equal(rebuiltGlobal?.total_sessions, expectedGlobal?.total_sessions);
assert.equal(rebuiltGlobal?.total_cards, expectedGlobal?.total_cards);
assert.equal(rebuiltGlobal?.episodes_started, expectedGlobal?.episodes_started);
assert.equal(rebuiltGlobal?.active_days, expectedGlobal?.active_days);
assert.equal(rebuiltAnimeRows, expectedAnimeRows);
assert.equal(rebuiltMediaRows, expectedMediaRows);
assert.equal(rebuiltAppliedSessions, expectedAppliedSessions);
assert.equal(firstRebuild.appliedSessions, expectedAppliedSessions);
assert.equal(secondRebuild.appliedSessions, firstRebuild.appliedSessions);
assert.ok(secondRebuild.rebuiltAtMs >= firstRebuild.rebuiltAtMs);
} finally {
tracker?.destroy();
tracker2?.destroy();
tracker3?.destroy();
cleanupDbPath(dbPath);
}
});
test('stats cli command runs lifetime rebuild when requested', async () => {
const { handler, calls, responses } = makeHandler({
getImmersionTracker: () => ({
rebuildLifetimeSummaries: async () => ({
appliedSessions: 4,
rebuiltAtMs: 1_710_000_000_000,
}),
}),
});
await handler(
{
statsResponsePath: '/tmp/subminer-stats-response.json',
statsCleanup: true,
statsCleanupLifetime: true,
},
'initial',
);
assert.deepEqual(calls, [
'ensureImmersionTrackerStarted',
'info:Stats lifetime rebuild complete: appliedSessions=4 rebuiltAtMs=1710000000000',
]);
assert.deepEqual(responses, [
{
responsePath: '/tmp/subminer-stats-response.json',
payload: { ok: true },
},
]);
});
test('stats cli command rejects cleanup calls without exactly one cleanup mode', async () => {
const { handler, calls, responses } = makeHandler({
getImmersionTracker: () => ({
cleanupVocabularyStats: async () => ({ scanned: 1, kept: 1, deleted: 0, repaired: 0 }),
rebuildLifetimeSummaries: async () => ({ appliedSessions: 0, rebuiltAtMs: 0 }),
}),
});
await handler(
{
statsResponsePath: '/tmp/subminer-stats-response.json',
statsCleanup: true,
statsCleanupVocab: true,
statsCleanupLifetime: true,
},
'initial',
);
assert.ok(calls.includes('error:Stats command failed:Choose exactly one stats cleanup mode.'));
assert.deepEqual(responses, [
{
responsePath: '/tmp/subminer-stats-response.json',
payload: { ok: false, error: 'Choose exactly one stats cleanup mode.' },
},
]);
});

View File

@@ -1,7 +1,10 @@
import fs from 'node:fs';
import path from 'node:path';
import type { CliArgs, CliCommandSource } from '../../cli/args';
import type { VocabularyCleanupSummary } from '../../core/services/immersion-tracker/types';
import type {
LifetimeRebuildSummary,
VocabularyCleanupSummary,
} from '../../core/services/immersion-tracker/types';
type StatsCliConfig = {
immersionTracking?: {
@@ -33,6 +36,7 @@ export function createRunStatsCliCommandHandler(deps: {
ensureVocabularyCleanupTokenizerReady?: () => Promise<void> | void;
getImmersionTracker: () => {
cleanupVocabularyStats?: () => Promise<VocabularyCleanupSummary>;
rebuildLifetimeSummaries?: () => Promise<LifetimeRebuildSummary>;
} | null;
ensureStatsServerStarted: () => string;
openExternal: (url: string) => Promise<unknown>;
@@ -55,7 +59,10 @@ export function createRunStatsCliCommandHandler(deps: {
};
return async (
args: Pick<CliArgs, 'statsResponsePath' | 'statsCleanup' | 'statsCleanupVocab'>,
args: Pick<
CliArgs,
'statsResponsePath' | 'statsCleanup' | 'statsCleanupVocab' | 'statsCleanupLifetime'
>,
source: CliCommandSource,
): Promise<void> => {
try {
@@ -71,13 +78,31 @@ export function createRunStatsCliCommandHandler(deps: {
}
if (args.statsCleanup) {
await deps.ensureVocabularyCleanupTokenizerReady?.();
if (!args.statsCleanupVocab || !tracker.cleanupVocabularyStats) {
const cleanupModes = [
args.statsCleanupVocab ? 'vocab' : null,
args.statsCleanupLifetime ? 'lifetime' : null,
].filter(Boolean);
if (cleanupModes.length !== 1) {
throw new Error('Choose exactly one stats cleanup mode.');
}
if (args.statsCleanupVocab) {
await deps.ensureVocabularyCleanupTokenizerReady?.();
}
if (args.statsCleanupVocab && tracker.cleanupVocabularyStats) {
const result = await tracker.cleanupVocabularyStats();
deps.logInfo(
`Stats vocabulary cleanup complete: scanned=${result.scanned} kept=${result.kept} deleted=${result.deleted} repaired=${result.repaired}`,
);
writeResponseSafe(args.statsResponsePath, { ok: true });
return;
}
if (!args.statsCleanupLifetime || !tracker.rebuildLifetimeSummaries) {
throw new Error('Stats cleanup mode is not available.');
}
const result = await tracker.cleanupVocabularyStats();
const result = await tracker.rebuildLifetimeSummaries();
deps.logInfo(
`Stats vocabulary cleanup complete: scanned=${result.scanned} kept=${result.kept} deleted=${result.deleted} repaired=${result.repaired}`,
`Stats lifetime rebuild complete: appliedSessions=${result.appliedSessions} rebuiltAtMs=${result.rebuiltAtMs}`,
);
writeResponseSafe(args.statsResponsePath, { ok: true });
return;

View File

@@ -183,6 +183,7 @@ export interface AppState {
runtimeOptionsManager: RuntimeOptionsManager | null;
trackerNotReadyWarningShown: boolean;
overlayDebugVisualizationEnabled: boolean;
statsOverlayVisible: boolean;
subsyncInProgress: boolean;
initialArgs: CliArgs | null;
mpvSocketPath: string;
@@ -260,6 +261,7 @@ export function createAppState(values: AppStateInitialValues): AppState {
runtimeOptionsManager: null,
trackerNotReadyWarningShown: false,
overlayDebugVisualizationEnabled: false,
statsOverlayVisible: false,
shortcutsRegistered: false,
overlayRuntimeInitialized: false,
fieldGroupingResolver: null,

View File

@@ -287,7 +287,7 @@ function createKeyboardHandlerHarness() {
});
let wordNodes = [createWordNode(10), createWordNode(80), createWordNode(150)];
const ctx = {
const ctx = {
dom: {
subtitleRoot: {
classList: subtitleRootClassList,
@@ -301,6 +301,7 @@ function createKeyboardHandlerHarness() {
platform: {
shouldToggleMouseIgnore: false,
isMacOSPlatform: false,
isModalLayer: false,
overlayLayer: 'always-on-top',
},
state: createRendererState(),
@@ -646,6 +647,42 @@ test('keyboard mode: opening lookup restores overlay keyboard focus', async () =
}
});
test('keyboard mode: visible-layer Ctrl+Shift+Y should not be toggled by renderer keydown', async () => {
const { ctx, handlers, testGlobals } = createKeyboardHandlerHarness();
try {
await handlers.setupMpvInputForwarding();
ctx.platform.isModalLayer = false;
testGlobals.dispatchKeydown({ key: 'Y', code: 'KeyY', ctrlKey: true, shiftKey: true });
assert.equal(ctx.state.keyboardDrivenModeEnabled, false);
handlers.handleKeyboardModeToggleRequested();
assert.equal(ctx.state.keyboardDrivenModeEnabled, true);
} finally {
ctx.state.keyboardDrivenModeEnabled = false;
testGlobals.restore();
}
});
test('keyboard mode: modal-layer Ctrl+Shift+Y still toggles via renderer keydown', async () => {
const { ctx, handlers, testGlobals } = createKeyboardHandlerHarness();
try {
await handlers.setupMpvInputForwarding();
ctx.platform.isModalLayer = true;
testGlobals.dispatchKeydown({ key: 'Y', code: 'KeyY', ctrlKey: true, shiftKey: true });
assert.equal(ctx.state.keyboardDrivenModeEnabled, true);
testGlobals.dispatchKeydown({ key: 'Y', code: 'KeyY', ctrlKey: true, shiftKey: true });
assert.equal(ctx.state.keyboardDrivenModeEnabled, false);
} finally {
ctx.state.keyboardDrivenModeEnabled = false;
testGlobals.restore();
}
});
test('keyboard mode: turning mode off clears selected token highlight', async () => {
const { ctx, handlers, testGlobals } = createKeyboardHandlerHarness();

View File

@@ -759,7 +759,7 @@ export function createKeyboardHandlers(
);
document.addEventListener('keydown', (e: KeyboardEvent) => {
if (isKeyboardDrivenModeToggle(e)) {
if (isKeyboardDrivenModeToggle(e) && ctx.platform.isModalLayer) {
e.preventDefault();
handleKeyboardModeToggleRequested();
return;

View File

@@ -236,9 +236,11 @@ test('computeWordClass preserves known and n+1 classes while adding JLPT classes
assert.equal(computeWordClass(nPlusOneJlpt), 'word word-n-plus-one word-jlpt-n2');
});
test('computeWordClass applies name-match class ahead of known and frequency classes', () => {
test('computeWordClass applies name-match class ahead of known, n+1, frequency, and JLPT classes', () => {
const token = createToken({
isKnown: true,
isNPlusOneTarget: true,
jlptLevel: 'N2',
frequencyRank: 10,
surface: 'アクア',
}) as MergedToken & { isNameMatch?: boolean };
@@ -511,19 +513,29 @@ test('getFrequencyRankLabelForToken returns rank only for frequency-colored toke
const knownToken = createToken({ surface: '既知', isKnown: true, frequencyRank: 20 });
const nPlusOneToken = createToken({ surface: '目標', isNPlusOneTarget: true, frequencyRank: 20 });
const outOfRangeToken = createToken({ surface: '圏外', frequencyRank: 1000 });
const nameToken = createToken({ surface: 'アクア', frequencyRank: 20 }) as MergedToken & {
isNameMatch?: boolean;
};
nameToken.isNameMatch = true;
assert.equal(getFrequencyRankLabelForToken(frequencyToken, settings), '20');
assert.equal(getFrequencyRankLabelForToken(knownToken, settings), '20');
assert.equal(getFrequencyRankLabelForToken(nPlusOneToken, settings), '20');
assert.equal(getFrequencyRankLabelForToken(outOfRangeToken, settings), null);
assert.equal(getFrequencyRankLabelForToken(nameToken, { ...settings, nameMatchEnabled: true }), null);
});
test('getJlptLevelLabelForToken returns level when token has jlpt metadata', () => {
const jlptToken = createToken({ surface: '語彙', jlptLevel: 'N2' });
const noJlptToken = createToken({ surface: '語彙' });
const nameToken = createToken({ surface: 'アクア', jlptLevel: 'N5' }) as MergedToken & {
isNameMatch?: boolean;
};
nameToken.isNameMatch = true;
assert.equal(getJlptLevelLabelForToken(jlptToken), 'N2');
assert.equal(getJlptLevelLabelForToken(noJlptToken), null);
assert.equal(getJlptLevelLabelForToken(nameToken, { nameMatchEnabled: true }), null);
});
test('sanitizeSubtitleHoverTokenColor falls back for pure black values', () => {

View File

@@ -91,6 +91,13 @@ const DEFAULT_FREQUENCY_RENDER_SETTINGS: FrequencyRenderSettings = {
};
const DEFAULT_NAME_MATCH_ENABLED = true;
function hasPrioritizedNameMatch(
token: MergedToken,
tokenRenderSettings?: Partial<Pick<TokenRenderSettings, 'nameMatchEnabled'>>,
): boolean {
return (tokenRenderSettings?.nameMatchEnabled ?? DEFAULT_NAME_MATCH_ENABLED) && token.isNameMatch === true;
}
function sanitizeFrequencyTopX(value: unknown, fallback: number): number {
if (typeof value !== 'number' || !Number.isFinite(value) || value <= 0) {
return fallback;
@@ -227,8 +234,12 @@ function getNormalizedFrequencyRank(token: MergedToken): number | null {
export function getFrequencyRankLabelForToken(
token: MergedToken,
frequencySettings?: Partial<FrequencyRenderSettings>,
frequencySettings?: Partial<TokenRenderSettings>,
): string | null {
if (hasPrioritizedNameMatch(token, frequencySettings)) {
return null;
}
const resolvedFrequencySettings = {
...DEFAULT_FREQUENCY_RENDER_SETTINGS,
...frequencySettings,
@@ -251,7 +262,14 @@ export function getFrequencyRankLabelForToken(
return rank === null ? null : String(rank);
}
export function getJlptLevelLabelForToken(token: MergedToken): string | null {
export function getJlptLevelLabelForToken(
token: MergedToken,
tokenRenderSettings?: Partial<Pick<TokenRenderSettings, 'nameMatchEnabled'>>,
): string | null {
if (hasPrioritizedNameMatch(token, tokenRenderSettings)) {
return null;
}
return token.jlptLevel ?? null;
}
@@ -304,7 +322,7 @@ function renderWithTokens(
if (frequencyRankLabel) {
span.dataset.frequencyRank = frequencyRankLabel;
}
const jlptLevelLabel = getJlptLevelLabelForToken(token);
const jlptLevelLabel = getJlptLevelLabelForToken(token, resolvedTokenRenderSettings);
if (jlptLevelLabel) {
span.dataset.jlptLevel = jlptLevelLabel;
}
@@ -340,7 +358,7 @@ function renderWithTokens(
if (frequencyRankLabel) {
span.dataset.frequencyRank = frequencyRankLabel;
}
const jlptLevelLabel = getJlptLevelLabelForToken(token);
const jlptLevelLabel = getJlptLevelLabelForToken(token, resolvedTokenRenderSettings);
if (jlptLevelLabel) {
span.dataset.jlptLevel = jlptLevelLabel;
}
@@ -452,22 +470,22 @@ export function computeWordClass(
const classes = ['word'];
if (token.isNPlusOneTarget) {
classes.push('word-n-plus-one');
} else if (resolvedTokenRenderSettings.nameMatchEnabled && token.isNameMatch) {
if (hasPrioritizedNameMatch(token, resolvedTokenRenderSettings)) {
classes.push('word-name-match');
} else if (token.isNPlusOneTarget) {
classes.push('word-n-plus-one');
} else if (token.isKnown) {
classes.push('word-known');
}
if (token.jlptLevel) {
if (!hasPrioritizedNameMatch(token, resolvedTokenRenderSettings) && token.jlptLevel) {
classes.push(`word-jlpt-${token.jlptLevel.toLowerCase()}`);
}
if (
!token.isKnown &&
!token.isNPlusOneTarget &&
!(resolvedTokenRenderSettings.nameMatchEnabled && token.isNameMatch)
!hasPrioritizedNameMatch(token, resolvedTokenRenderSettings)
) {
const frequencyClass = getFrequencyDictionaryClass(token, resolvedTokenRenderSettings);
if (frequencyClass) {

View File

@@ -630,6 +630,9 @@ export interface StatsConfig {
autoOpenBrowser?: boolean;
}
// How retention is configured: a named preset or fully advanced per-table values.
export type ImmersionTrackingRetentionMode = 'preset' | 'advanced';
// Built-in retention presets, ordered from least to most history kept.
export type ImmersionTrackingRetentionPreset = 'minimal' | 'balanced' | 'deep-history';
export interface ImmersionTrackingConfig {
enabled?: boolean;
dbPath?: string;
@@ -638,13 +641,21 @@ export interface ImmersionTrackingConfig {
queueCap?: number;
payloadCapBytes?: number;
maintenanceIntervalMs?: number;
retentionMode?: ImmersionTrackingRetentionMode;
retentionPreset?: ImmersionTrackingRetentionPreset;
retention?: {
eventsDays?: number;
telemetryDays?: number;
sessionsDays?: number;
dailyRollupsDays?: number;
monthlyRollupsDays?: number;
vacuumIntervalDays?: number;
};
lifetimeSummaries?: {
global?: boolean;
anime?: boolean;
media?: boolean;
};
}
export interface Config {
@@ -859,13 +870,21 @@ export interface ResolvedConfig {
queueCap: number;
payloadCapBytes: number;
maintenanceIntervalMs: number;
retentionMode: ImmersionTrackingRetentionMode;
retentionPreset: ImmersionTrackingRetentionPreset;
retention: {
eventsDays: number;
telemetryDays: number;
sessionsDays: number;
dailyRollupsDays: number;
monthlyRollupsDays: number;
vacuumIntervalDays: number;
};
lifetimeSummaries: {
global: boolean;
anime: boolean;
media: boolean;
};
};
stats: {
toggleKey: string;

View File

@@ -0,0 +1,172 @@
import assert from 'node:assert/strict';
import test from 'node:test';
import { MacOSWindowTracker } from './macos-tracker';
test('MacOSWindowTracker keeps the last geometry through a single helper miss', async () => {
  const expectedGeometry = { x: 10, y: 20, width: 1280, height: 720 };
  // Scripted helper responses: hit, one miss, hit again.
  const scripted = [
    { stdout: '10,20,1280,720,1', stderr: '' },
    { stdout: 'not-found', stderr: '' },
    { stdout: '10,20,1280,720,1', stderr: '' },
  ];
  let nextCall = 0;
  const tracker = new MacOSWindowTracker('/tmp/mpv.sock', {
    resolveHelper: () => ({
      helperPath: 'helper.swift',
      helperType: 'swift',
    }),
    runHelper: async () => scripted[nextCall++] ?? scripted.at(-1)!,
    trackingLossGraceMs: 0,
  });
  // Invoke the private poll and yield so the helper promise settles.
  const poll = async () => {
    (tracker as unknown as { pollGeometry: () => void }).pollGeometry();
    await new Promise((resolve) => setTimeout(resolve, 0));
  };

  // A single miss sits below the consecutive-miss budget, so the last
  // known geometry must survive all three polls.
  for (let round = 0; round < 3; round += 1) {
    await poll();
    assert.deepEqual(tracker.getGeometry(), expectedGeometry);
  }
});
test('MacOSWindowTracker drops tracking after consecutive helper misses', async () => {
  // Scripted helper responses: one hit followed by two misses.
  const scripted = [
    { stdout: '10,20,1280,720,1', stderr: '' },
    { stdout: 'not-found', stderr: '' },
    { stdout: 'not-found', stderr: '' },
  ];
  let nextCall = 0;
  const tracker = new MacOSWindowTracker('/tmp/mpv.sock', {
    resolveHelper: () => ({
      helperPath: 'helper.swift',
      helperType: 'swift',
    }),
    runHelper: async () => scripted[nextCall++] ?? scripted.at(-1)!,
    trackingLossGraceMs: 0,
  });
  // Invoke the private poll and yield so the helper promise settles.
  const poll = async () => {
    (tracker as unknown as { pollGeometry: () => void }).pollGeometry();
    await new Promise((resolve) => setTimeout(resolve, 0));
  };

  await poll();
  assert.equal(tracker.isTracking(), true);
  // First miss stays inside the budget.
  await poll();
  assert.equal(tracker.isTracking(), true);
  // Second consecutive miss exhausts the budget and clears geometry.
  await poll();
  assert.equal(tracker.isTracking(), false);
  assert.equal(tracker.getGeometry(), null);
});
test('MacOSWindowTracker keeps tracking through repeated helper misses inside grace window', async () => {
  let clock = 1_000;
  let nextCall = 0;
  // One hit, then a run of misses that never outlasts the grace window.
  const scripted = [
    { stdout: '10,20,1280,720,1', stderr: '' },
    { stdout: 'not-found', stderr: '' },
    { stdout: 'not-found', stderr: '' },
    { stdout: 'not-found', stderr: '' },
    { stdout: 'not-found', stderr: '' },
  ];
  const tracker = new MacOSWindowTracker('/tmp/mpv.sock', {
    resolveHelper: () => ({
      helperPath: 'helper.swift',
      helperType: 'swift',
    }),
    runHelper: async () => scripted[nextCall++] ?? scripted.at(-1)!,
    now: () => clock,
    trackingLossGraceMs: 1_500,
  });
  // Invoke the private poll and yield so the helper promise settles.
  const poll = async () => {
    (tracker as unknown as { pollGeometry: () => void }).pollGeometry();
    await new Promise((resolve) => setTimeout(resolve, 0));
  };

  await poll();
  assert.equal(tracker.isTracking(), true);
  // Three misses spread over 750ms remain within the 1.5s grace window.
  for (let step = 0; step < 3; step += 1) {
    clock += 250;
    await poll();
    assert.equal(tracker.isTracking(), true);
  }
  assert.deepEqual(tracker.getGeometry(), {
    x: 10,
    y: 20,
    width: 1280,
    height: 720,
  });
});
test('MacOSWindowTracker drops tracking after grace window expires', async () => {
  let clock = 1_000;
  let nextCall = 0;
  // One hit, then misses that eventually outlast the 500ms grace window.
  const scripted = [
    { stdout: '10,20,1280,720,1', stderr: '' },
    { stdout: 'not-found', stderr: '' },
    { stdout: 'not-found', stderr: '' },
    { stdout: 'not-found', stderr: '' },
  ];
  const tracker = new MacOSWindowTracker('/tmp/mpv.sock', {
    resolveHelper: () => ({
      helperPath: 'helper.swift',
      helperType: 'swift',
    }),
    runHelper: async () => scripted[nextCall++] ?? scripted.at(-1)!,
    now: () => clock,
    trackingLossGraceMs: 500,
  });
  // Invoke the private poll and yield so the helper promise settles.
  const poll = async () => {
    (tracker as unknown as { pollGeometry: () => void }).pollGeometry();
    await new Promise((resolve) => setTimeout(resolve, 0));
  };

  await poll();
  assert.equal(tracker.isTracking(), true);
  // Misses at +250/+500/+750ms from the first miss stay within grace.
  for (let step = 0; step < 3; step += 1) {
    clock += 250;
    await poll();
    assert.equal(tracker.isTracking(), true);
  }
  // The next miss exceeds the grace window and drops tracking.
  clock += 250;
  await poll();
  assert.equal(tracker.isTracking(), false);
  assert.equal(tracker.getGeometry(), null);
});

View File

@@ -26,11 +26,62 @@ import type { WindowGeometry } from '../types';
// Scoped logger for the macOS window tracker.
const log = createLogger('tracker').child('macos');
// Raw process output captured from a single helper invocation.
type MacOSTrackerRunnerResult = {
stdout: string;
stderr: string;
};
// Injectable dependencies for MacOSWindowTracker; all optional so
// production code uses the defaults and tests can stub helper discovery,
// execution, timing, and loss thresholds.
type MacOSTrackerDeps = {
// Overrides filesystem-based helper detection when provided.
resolveHelper?: () => { helperPath: string; helperType: 'binary' | 'swift' } | null;
// Overrides the default execFile-based helper runner.
runHelper?: (
helperPath: string,
helperType: 'binary' | 'swift',
targetMpvSocketPath: string | null,
) => Promise<MacOSTrackerRunnerResult>;
// Miss budget used when the grace window is disabled (0ms).
maxConsecutiveMisses?: number;
// How long misses are tolerated before tracking is dropped.
trackingLossGraceMs?: number;
// Clock source, injectable for deterministic tests.
now?: () => number;
};
// Parsed window state reported by the macOS helper.
export interface MacOSHelperWindowState {
geometry: WindowGeometry;
focused: boolean;
}
/**
 * Default helper runner: executes the macOS window helper via execFile and
 * resolves with its captured stdout/stderr.
 *
 * Swift-source helpers are run through the `swift` interpreter; compiled
 * helpers execute directly. When a target mpv socket path is given it is
 * passed to the helper as its positional argument. On failure the exec
 * error is rejected with `stderr` attached for caller-side logging.
 */
function runHelperWithExecFile(
  helperPath: string,
  helperType: 'binary' | 'swift',
  targetMpvSocketPath: string | null,
): Promise<MacOSTrackerRunnerResult> {
  const isCompiledHelper = helperType === 'binary';
  const command = isCompiledHelper ? helperPath : 'swift';
  const args: string[] = isCompiledHelper ? [] : [helperPath];
  if (targetMpvSocketPath) {
    args.push(targetMpvSocketPath);
  }
  return new Promise((resolve, reject) => {
    const options = {
      encoding: 'utf-8',
      timeout: 1000,
      maxBuffer: 1024 * 1024,
    };
    execFile(command, args, options, (error, stdout, stderr) => {
      if (error) {
        // Attach stderr so the caller can surface helper diagnostics.
        reject(Object.assign(error, { stderr }));
        return;
      }
      resolve({ stdout: stdout || '', stderr: stderr || '' });
    });
  });
}
export function parseMacOSHelperOutput(result: string): MacOSHelperWindowState | null {
const trimmed = result.trim();
if (!trimmed || trimmed === 'not-found') {
@@ -79,11 +130,31 @@ export class MacOSWindowTracker extends BaseWindowTracker {
// Dedup key for repeated exec-error log lines.
private lastExecErrorFingerprint: string | null = null;
// Timestamp of the last exec-error log emission (rate limiting).
private lastExecErrorLoggedAtMs = 0;
// mpv IPC socket path used to pick the right window, if provided.
private readonly targetMpvSocketPath: string | null;
// Helper runner; injected in tests, execFile-backed in production.
private readonly runHelper: (
helperPath: string,
helperType: 'binary' | 'swift',
targetMpvSocketPath: string | null,
) => Promise<MacOSTrackerRunnerResult>;
// Miss budget applied when the grace window is disabled.
private readonly maxConsecutiveMisses: number;
// Duration misses are tolerated before geometry is dropped.
private readonly trackingLossGraceMs: number;
// Clock source (injectable for deterministic tests).
private readonly now: () => number;
// Misses observed since the last successful helper read.
private consecutiveMisses = 0;
// When the current run of misses began; null while tracking is healthy.
private trackingLossStartedAtMs: number | null = null;
/**
 * @param targetMpvSocketPath - Optional mpv IPC socket path used by the
 *   helper to identify the target window; blank strings are treated as
 *   absent.
 * @param deps - Optional dependency overrides (helper discovery/execution,
 *   clock, loss thresholds) used by tests.
 */
constructor(targetMpvSocketPath?: string, deps: MacOSTrackerDeps = {}) {
  super();
  this.targetMpvSocketPath = targetMpvSocketPath?.trim() || null;
  this.runHelper = deps.runHelper ?? runHelperWithExecFile;
  this.maxConsecutiveMisses = Math.max(1, Math.floor(deps.maxConsecutiveMisses ?? 2));
  this.trackingLossGraceMs = Math.max(0, Math.floor(deps.trackingLossGraceMs ?? 1_500));
  this.now = deps.now ?? (() => Date.now());
  // Prefer the injected resolver (tests); only fall back to filesystem
  // detection when none is supplied. Calling detectHelper() unconditionally
  // here would defeat the injected resolveHelper hook.
  const resolvedHelper = deps.resolveHelper?.() ?? null;
  if (resolvedHelper) {
    this.helperPath = resolvedHelper.helperPath;
    this.helperType = resolvedHelper.helperType;
  } else {
    this.detectHelper();
  }
}
private materializeAsarHelper(sourcePath: string, helperType: 'binary' | 'swift'): string | null {
@@ -188,48 +259,65 @@ export class MacOSWindowTracker extends BaseWindowTracker {
}
}
// Clears the miss counter and grace-window start after a successful
// helper read (or once tracking has been dropped).
private resetTrackingLossState(): void {
  this.trackingLossStartedAtMs = null;
  this.consecutiveMisses = 0;
}
// Decides whether the current helper miss should drop tracking.
// Untracked state drops immediately; with the grace window disabled a
// consecutive-miss budget applies; otherwise the first miss opens the
// window and later misses drop only once it has elapsed.
private shouldDropTracking(): boolean {
  if (!this.isTracking()) {
    return true;
  }
  if (this.trackingLossGraceMs === 0) {
    return this.consecutiveMisses >= this.maxConsecutiveMisses;
  }
  const lossStartedAt = this.trackingLossStartedAtMs;
  if (lossStartedAt === null) {
    // First miss of a run: start the grace window, do not drop yet.
    this.trackingLossStartedAtMs = this.now();
    return false;
  }
  return this.now() - lossStartedAt > this.trackingLossGraceMs;
}
private registerTrackingMiss(): void {
this.consecutiveMisses += 1;
if (this.shouldDropTracking()) {
this.updateGeometry(null);
this.resetTrackingLossState();
}
}
// Polls the macOS helper once for the target window's geometry/focus.
// Overlapping runs are suppressed via pollInFlight. A successful parse
// resets loss state and publishes geometry/focus; a miss or exec failure
// is routed through registerTrackingMiss so the grace-window logic (not
// an immediate updateGeometry(null)) decides when tracking is dropped.
private pollGeometry(): void {
  if (this.pollInFlight || !this.helperPath || !this.helperType) {
    return;
  }
  this.pollInFlight = true;
  // Use Core Graphics API via Swift helper for reliable window detection.
  // This works with both bundled and unbundled mpv installations.
  void this.runHelper(this.helperPath, this.helperType, this.targetMpvSocketPath)
    .then(({ stdout }) => {
      const parsed = parseMacOSHelperOutput(stdout || '');
      if (parsed) {
        this.resetTrackingLossState();
        this.updateFocus(parsed.focused);
        this.updateGeometry(parsed.geometry);
        return;
      }
      // Helper ran but reported no window: count a miss instead of
      // dropping geometry outright, so brief losses survive the grace window.
      this.registerTrackingMiss();
    })
    .catch((error: unknown) => {
      const err = error instanceof Error ? error : new Error(String(error));
      // Pull the stderr that runHelperWithExecFile attaches, if present.
      const stderr =
        typeof error === 'object' &&
        error !== null &&
        'stderr' in error &&
        typeof (error as { stderr?: unknown }).stderr === 'string'
          ? (error as { stderr: string }).stderr
          : '';
      this.maybeLogExecError(err, stderr);
      this.registerTrackingMiss();
    })
    .finally(() => {
      this.pollInFlight = false;
    });
}
}