Fix verification and immersion-tracker grouping

- isolate verifier artifacts and lease handling
- switch weekly/monthly tracker cutoffs to calendar boundaries
- tighten boot lifecycle and zip writer tests
This commit is contained in:
2026-03-28 00:01:17 -07:00
parent 1408ad652a
commit 8e5cb5f885
11 changed files with 515 additions and 37 deletions

View File

@@ -787,6 +787,208 @@ test('getTrendsDashboard keeps local-midnight session buckets separate', () => {
}
});
// Verifies that month-grouped trends produce one bucket per touched local calendar
// month (Feb 2026 and Mar 2026 here), and that the progress series (newWords,
// episodes, lookups) use the same monthly labels as the activity watch-time series.
test('getTrendsDashboard month grouping spans every touched calendar month and keeps progress monthly', () => {
const dbPath = makeDbPath();
const db = new Database(dbPath);
const RealDate = Date;
// Freeze "now" at 2026-03-01 12:00 local time so both sessions fall inside the 30d range.
class MockDate extends Date {
constructor(...args: ConstructorParameters<typeof Date>) {
super(...(args.length === 0 ? [new RealDate(2026, 2, 1, 12, 0, 0).getTime()] : args));
}
static override now(): number {
return new RealDate(2026, 2, 1, 12, 0, 0).getTime();
}
}
try {
globalThis.Date = MockDate as DateConstructor;
ensureSchema(db);
const stmts = createTrackerPreparedStatements(db);
// Two videos under one anime: one watched in February, one in March.
const febVideoId = getOrCreateVideoRecord(db, 'local:/tmp/feb-trends.mkv', {
canonicalTitle: 'Monthly Trends',
sourcePath: '/tmp/feb-trends.mkv',
sourceUrl: null,
sourceType: SOURCE_TYPE_LOCAL,
});
const marVideoId = getOrCreateVideoRecord(db, 'local:/tmp/mar-trends.mkv', {
canonicalTitle: 'Monthly Trends',
sourcePath: '/tmp/mar-trends.mkv',
sourceUrl: null,
sourceType: SOURCE_TYPE_LOCAL,
});
const animeId = getOrCreateAnimeRecord(db, {
parsedTitle: 'Monthly Trends',
canonicalTitle: 'Monthly Trends',
anilistId: null,
titleRomaji: null,
titleEnglish: null,
titleNative: null,
metadataJson: null,
});
linkVideoToAnimeRecord(db, febVideoId, {
animeId,
parsedBasename: 'feb-trends.mkv',
parsedTitle: 'Monthly Trends',
parsedSeason: 1,
parsedEpisode: 1,
parserSource: 'test',
parserConfidence: 1,
parseMetadataJson: null,
});
linkVideoToAnimeRecord(db, marVideoId, {
animeId,
parsedBasename: 'mar-trends.mkv',
parsedTitle: 'Monthly Trends',
parsedSeason: 1,
parsedEpisode: 2,
parserSource: 'test',
parserConfidence: 1,
parseMetadataJson: null,
});
// Session starts straddle a month boundary: Feb 15 evening vs Mar 1 morning (local time).
const febStartedAtMs = new RealDate(2026, 1, 15, 20, 0, 0).getTime();
const marStartedAtMs = new RealDate(2026, 2, 1, 9, 0, 0).getTime();
const febSessionId = startSessionRecord(db, febVideoId, febStartedAtMs).sessionId;
const marSessionId = startSessionRecord(db, marVideoId, marStartedAtMs).sessionId;
// For each session: insert one telemetry row, then close the session out with
// final totals so it reads as a completed 30-minute watch.
for (const [sessionId, startedAtMs, tokensSeen, cardsMined, yomitanLookupCount] of [
[febSessionId, febStartedAtMs, 100, 2, 3],
[marSessionId, marStartedAtMs, 120, 4, 5],
] as const) {
stmts.telemetryInsertStmt.run(
sessionId,
startedAtMs + 60_000,
30 * 60_000,
30 * 60_000,
4,
tokensSeen,
cardsMined,
yomitanLookupCount,
yomitanLookupCount,
yomitanLookupCount,
0,
0,
0,
0,
startedAtMs + 60_000,
startedAtMs + 60_000,
);
// status = 2 marks the session as finished; totals mirror the telemetry row above.
db.prepare(
`
UPDATE imm_sessions
SET
ended_at_ms = ?,
status = 2,
total_watched_ms = ?,
active_watched_ms = ?,
lines_seen = ?,
tokens_seen = ?,
cards_mined = ?,
lookup_count = ?,
lookup_hits = ?,
yomitan_lookup_count = ?,
LAST_UPDATE_DATE = ?
WHERE session_id = ?
`,
).run(
startedAtMs + 60_000,
30 * 60_000,
30 * 60_000,
4,
tokensSeen,
cardsMined,
yomitanLookupCount,
yomitanLookupCount,
yomitanLookupCount,
startedAtMs + 60_000,
sessionId,
);
}
// Seed daily and monthly rollups directly so the dashboard's rollup-backed
// series (watch time, episodes) have data in both February and March.
const insertDailyRollup = db.prepare(
`
INSERT INTO imm_daily_rollups (
rollup_day, video_id, total_sessions, total_active_min, total_lines_seen,
total_tokens_seen, total_cards, CREATED_DATE, LAST_UPDATE_DATE
) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?)
`,
);
const insertMonthlyRollup = db.prepare(
`
INSERT INTO imm_monthly_rollups (
rollup_month, video_id, total_sessions, total_active_min, total_lines_seen,
total_tokens_seen, total_cards, CREATED_DATE, LAST_UPDATE_DATE
) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?)
`,
);
// NOTE(review): rollup_day is UTC epoch-days while the session timestamps are
// local — fine here because both days are far from any UTC/local day boundary.
const febEpochDay = Math.floor(febStartedAtMs / 86_400_000);
const marEpochDay = Math.floor(marStartedAtMs / 86_400_000);
insertDailyRollup.run(febEpochDay, febVideoId, 1, 30, 4, 100, 2, febStartedAtMs, febStartedAtMs);
insertDailyRollup.run(marEpochDay, marVideoId, 1, 30, 4, 120, 4, marStartedAtMs, marStartedAtMs);
// rollup_month uses YYYYMM keys: 202602 = Feb 2026, 202603 = Mar 2026.
insertMonthlyRollup.run(202602, febVideoId, 1, 30, 4, 100, 2, febStartedAtMs, febStartedAtMs);
insertMonthlyRollup.run(202603, marVideoId, 1, 30, 4, 120, 4, marStartedAtMs, marStartedAtMs);
// One first-seen word per month so the newWords series also spans both months.
db.prepare(
`
INSERT INTO imm_words (
headword, word, reading, part_of_speech, pos1, pos2, pos3, first_seen, last_seen, frequency
) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
`,
).run(
'二月',
'二月',
'にがつ',
'noun',
'名詞',
'',
'',
Math.floor(febStartedAtMs / 1000),
Math.floor(febStartedAtMs / 1000),
1,
);
db.prepare(
`
INSERT INTO imm_words (
headword, word, reading, part_of_speech, pos1, pos2, pos3, first_seen, last_seen, frequency
) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
`,
).run(
'三月',
'三月',
'さんがつ',
'noun',
'名詞',
'',
'',
Math.floor(marStartedAtMs / 1000),
Math.floor(marStartedAtMs / 1000),
1,
);
const dashboard = getTrendsDashboard(db, '30d', 'month');
// Two touched months -> exactly two watch-time buckets.
assert.equal(dashboard.activity.watchTime.length, 2);
// Every progress series must share the activity series' monthly labels,
// i.e. none of them silently falls back to daily grouping.
assert.deepEqual(
dashboard.progress.newWords.map((point) => point.label),
dashboard.activity.watchTime.map((point) => point.label),
);
assert.deepEqual(
dashboard.progress.episodes.map((point) => point.label),
dashboard.activity.watchTime.map((point) => point.label),
);
assert.deepEqual(
dashboard.progress.lookups.map((point) => point.label),
dashboard.activity.watchTime.map((point) => point.label),
);
} finally {
// Always restore the real Date and clean up the scratch database.
globalThis.Date = RealDate;
db.close();
cleanupDbPath(dbPath);
}
});
test('getQueryHints reads all-time totals from lifetime summary', () => {
const dbPath = makeDbPath();
const db = new Database(dbPath);
@@ -857,6 +1059,72 @@ test('getQueryHints reads all-time totals from lifetime summary', () => {
}
});
// The weekly new-word cutoff must be the local midnight seven calendar days ago,
// not "now minus 7*24h": a word first seen just before that midnight is excluded
// while one first seen just after it is counted.
test('getQueryHints computes weekly new-word cutoff from calendar midnights', () => {
  const dbPath = makeDbPath();
  const db = new Database(dbPath);
  const RealDate = Date;
  // Freeze "now" at 2026-03-15 12:00 local; the week boundary is then 2026-03-08 00:00.
  const frozenNowMs = new RealDate(2026, 2, 15, 12, 0, 0).getTime();
  class MockDate extends Date {
    constructor(...args: ConstructorParameters<typeof Date>) {
      super(...(args.length === 0 ? [frozenNowMs] : args));
    }
    static override now(): number {
      return frozenNowMs;
    }
  }
  try {
    globalThis.Date = MockDate as DateConstructor;
    ensureSchema(db);
    const insertWord = db.prepare(
      `
      INSERT INTO imm_words (
        headword, word, reading, part_of_speech, pos1, pos2, pos3, first_seen, last_seen, frequency
      ) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
      `,
    );
    // One word 30 minutes before the boundary midnight, one 30 minutes after it.
    const wordRows: Array<[string, string, number]> = [
      [
        '境界前',
        'きょうかいまえ',
        Math.floor(new RealDate(2026, 2, 7, 23, 30, 0).getTime() / 1000),
      ],
      [
        '境界後',
        'きょうかいご',
        Math.floor(new RealDate(2026, 2, 8, 0, 30, 0).getTime() / 1000),
      ],
    ];
    for (const [surface, reading, seenSec] of wordRows) {
      insertWord.run(surface, surface, reading, 'noun', '名詞', '', '', seenSec, seenSec, 1);
    }
    const hints = getQueryHints(db);
    // Only the post-midnight word falls inside the calendar week window.
    assert.equal(hints.newWordsThisWeek, 1);
  } finally {
    // Always restore the real Date and clean up the scratch database.
    globalThis.Date = RealDate;
    db.close();
    cleanupDbPath(dbPath);
  }
});
test('getQueryHints counts new words by distinct headword first-seen time', () => {
const dbPath = makeDbPath();
const db = new Database(dbPath);

View File

@@ -8,6 +8,7 @@ import {
pruneRawRetention,
pruneRollupRetention,
runOptimizeMaintenance,
toMonthKey,
} from './maintenance';
import { ensureSchema } from './storage';
@@ -81,6 +82,12 @@ test('pruneRawRetention uses session retention separately from telemetry retenti
}
});
// Negative (pre-epoch) timestamps must floor into December 1969, and the epoch
// itself must map to January 1970 — i.e. no round-toward-zero day truncation.
test('toMonthKey floors negative timestamps into the prior UTC month', () => {
  const cases: Array<[timestampMs: number, expectedMonthKey: number]> = [
    [-1, 196912],
    [-86_400_000, 196912],
    [0, 197001],
  ];
  for (const [timestampMs, expectedMonthKey] of cases) {
    assert.equal(toMonthKey(timestampMs), expectedMonthKey);
  }
});
test('raw retention keeps rollups and rollup retention prunes them separately', () => {
const dbPath = makeDbPath();
const db = new Database(dbPath);

View File

@@ -30,7 +30,7 @@ interface RawRetentionResult {
}
export function toMonthKey(timestampMs: number): number {
const epochDay = Number(BigInt(Math.trunc(timestampMs)) / BigInt(DAILY_MS));
const epochDay = Math.floor(timestampMs / DAILY_MS);
const z = epochDay + 719468;
const era = Math.floor(z / 146097);
const doe = z - era * 146097;

View File

@@ -131,7 +131,8 @@ export function getSessionWordsByLine(
function getNewWordCounts(db: DatabaseSync): { newWordsToday: number; newWordsThisWeek: number } {
const now = new Date();
const todayStartSec = new Date(now.getFullYear(), now.getMonth(), now.getDate()).getTime() / 1000;
const weekAgoSec = todayStartSec - 7 * 86_400;
const weekAgoSec =
new Date(now.getFullYear(), now.getMonth(), now.getDate() - 7).getTime() / 1000;
const row = db
.prepare(

View File

@@ -83,7 +83,13 @@ function getTrendMonthlyLimit(range: TrendRange): number {
if (range === 'all') {
return 120;
}
return Math.max(1, Math.ceil(TREND_DAY_LIMITS[range] / 30));
const now = new Date();
const cutoff = new Date(
now.getFullYear(),
now.getMonth(),
now.getDate() - (TREND_DAY_LIMITS[range] - 1),
);
return Math.max(1, (now.getFullYear() - cutoff.getFullYear()) * 12 + now.getMonth() - cutoff.getMonth() + 1);
}
function getTrendCutoffMs(range: TrendRange): number | null {
@@ -122,6 +128,11 @@ function getLocalDateForEpochDay(epochDay: number): Date {
return new Date(utcDate.getTime() + utcDate.getTimezoneOffset() * 60_000);
}
// Bucket a timestamp into its local-time calendar month, encoded as YYYYMM
// (e.g. March 2026 -> 202603).
function getLocalMonthKey(timestampMs: number): number {
  const localDate = new Date(timestampMs);
  const monthOrdinal = localDate.getMonth() + 1; // getMonth() is zero-based
  return localDate.getFullYear() * 100 + monthOrdinal;
}
function getTrendSessionWordCount(session: Pick<TrendSessionMetricRow, 'tokensSeen'>): number {
return session.tokensSeen;
}
@@ -218,6 +229,20 @@ function buildSessionSeriesByDay(
.map(([epochDay, value]) => ({ label: dayLabel(epochDay), value }));
}
// Sum a per-session metric into local calendar-month buckets and emit one chart
// point per touched month, in ascending month order.
function buildSessionSeriesByMonth(
  sessions: TrendSessionMetricRow[],
  getValue: (session: TrendSessionMetricRow) => number,
): TrendChartPoint[] {
  const totalsByMonth = new Map<number, number>();
  for (const session of sessions) {
    const monthKey = getLocalMonthKey(session.startedAtMs);
    const runningTotal = totalsByMonth.get(monthKey) ?? 0;
    totalsByMonth.set(monthKey, runningTotal + getValue(session));
  }
  const orderedMonthKeys = [...totalsByMonth.keys()].sort((left, right) => left - right);
  return orderedMonthKeys.map((monthKey) => ({
    label: makeTrendLabel(monthKey),
    value: totalsByMonth.get(monthKey) ?? 0,
  }));
}
function buildLookupsPerHundredWords(sessions: TrendSessionMetricRow[]): TrendChartPoint[] {
const lookupsByDay = new Map<number, number>();
const wordsByDay = new Map<number, number>();
@@ -441,6 +466,26 @@ function buildEpisodesPerDayFromDailyRollups(
}));
}
// Count distinct videos ("episodes") touched in each monthly rollup bucket.
// Rollup rows with no video id carry no episode information and are skipped.
function buildEpisodesPerMonthFromRollups(rollups: ImmersionSessionRollupRow[]): TrendChartPoint[] {
  const videosByMonth = new Map<number, Set<number>>();
  for (const rollup of rollups) {
    const { videoId } = rollup;
    if (videoId === null) {
      continue;
    }
    let videoIds = videosByMonth.get(rollup.rollupDayOrMonth);
    if (videoIds === undefined) {
      videoIds = new Set<number>();
      videosByMonth.set(rollup.rollupDayOrMonth, videoIds);
    }
    videoIds.add(videoId);
  }
  return [...videosByMonth.entries()]
    .sort(([left], [right]) => left - right)
    .map(([monthKey, videoIds]) => ({
      label: makeTrendLabel(monthKey),
      value: videoIds.size,
    }));
}
function getTrendSessionMetrics(
db: DatabaseSync,
cutoffMs: number | null,
@@ -494,6 +539,32 @@ function buildNewWordsPerDay(db: DatabaseSync, cutoffMs: number | null): TrendCh
}));
}
// Count first-seen words per local calendar month directly from imm_words.
// first_seen is stored in epoch seconds, so an optional cutoff (ms) is converted
// before binding; a null cutoff means "all time".
function buildNewWordsPerMonth(db: DatabaseSync, cutoffMs: number | null): TrendChartPoint[] {
  const cutoffFilter = cutoffMs === null ? '' : 'AND first_seen >= ?';
  const statement = db.prepare(`
    SELECT
      CAST(strftime('%Y%m', first_seen, 'unixepoch', 'localtime') AS INTEGER) AS monthKey,
      COUNT(*) AS wordCount
    FROM imm_words
    WHERE first_seen IS NOT NULL
    ${cutoffFilter}
    GROUP BY monthKey
    ORDER BY monthKey ASC
  `);
  const bindParams: number[] = cutoffMs === null ? [] : [Math.floor(cutoffMs / 1000)];
  const rows = statement.all(...bindParams) as Array<{ monthKey: number; wordCount: number }>;
  return rows.map(({ monthKey, wordCount }) => ({
    label: makeTrendLabel(monthKey),
    value: wordCount,
  }));
}
export function getTrendsDashboard(
db: DatabaseSync,
range: TrendRange = '30d',
@@ -502,10 +573,11 @@ export function getTrendsDashboard(
const dayLimit = getTrendDayLimit(range);
const monthlyLimit = getTrendMonthlyLimit(range);
const cutoffMs = getTrendCutoffMs(range);
const chartRollups =
groupBy === 'month' ? getMonthlyRollups(db, monthlyLimit) : getDailyRollups(db, dayLimit);
const useMonthlyBuckets = groupBy === 'month';
const dailyRollups = getDailyRollups(db, dayLimit);
const monthlyRollups = getMonthlyRollups(db, monthlyLimit);
const chartRollups = useMonthlyBuckets ? monthlyRollups : dailyRollups;
const sessions = getTrendSessionMetrics(db, cutoffMs);
const titlesByVideoId = getVideoAnimeTitleMap(
db,
@@ -545,11 +617,19 @@ export function getTrendsDashboard(
watchTime: accumulatePoints(activity.watchTime),
sessions: accumulatePoints(activity.sessions),
words: accumulatePoints(activity.words),
newWords: accumulatePoints(buildNewWordsPerDay(db, cutoffMs)),
newWords: accumulatePoints(
useMonthlyBuckets ? buildNewWordsPerMonth(db, cutoffMs) : buildNewWordsPerDay(db, cutoffMs),
),
cards: accumulatePoints(activity.cards),
episodes: accumulatePoints(buildEpisodesPerDayFromDailyRollups(dailyRollups)),
episodes: accumulatePoints(
useMonthlyBuckets
? buildEpisodesPerMonthFromRollups(monthlyRollups)
: buildEpisodesPerDayFromDailyRollups(dailyRollups),
),
lookups: accumulatePoints(
buildSessionSeriesByDay(sessions, (session) => session.yomitanLookupCount),
useMonthlyBuckets
? buildSessionSeriesByMonth(sessions, (session) => session.yomitanLookupCount)
: buildSessionSeriesByDay(sessions, (session) => session.yomitanLookupCount),
),
},
ratios: {

View File

@@ -5,6 +5,8 @@ import { createMainBootServices } from './services';
test('createMainBootServices builds boot-phase service bundle', () => {
const calls: string[] = [];
let setPathValue: string | null = null;
const appOnCalls: string[] = [];
let secondInstanceHandlerRegistered = false;
const services = createMainBootServices({
platform: 'linux',
@@ -27,12 +29,17 @@ test('createMainBootServices builds boot-phase service bundle', () => {
setPathValue = value;
},
quit: () => {},
on: () => ({}),
on: (event) => {
appOnCalls.push(event);
return {};
},
whenReady: async () => {},
},
shouldBypassSingleInstanceLock: () => false,
requestSingleInstanceLockEarly: () => true,
registerSecondInstanceHandlerEarly: () => {},
registerSecondInstanceHandlerEarly: () => {
secondInstanceHandlerRegistered = true;
},
onConfigStartupParseError: () => {
throw new Error('unexpected parse failure');
},
@@ -78,6 +85,10 @@ test('createMainBootServices builds boot-phase service bundle', () => {
mpvSocketPath: '/tmp/subminer.sock',
texthookerPort: 5174,
});
assert.equal(services.appLifecycleApp.on('ready', () => {}), services.appLifecycleApp);
assert.equal(services.appLifecycleApp.on('second-instance', () => {}), services.appLifecycleApp);
assert.deepEqual(appOnCalls, ['ready']);
assert.equal(secondInstanceHandlerRegistered, true);
assert.deepEqual(calls, ['mkdir:/tmp/subminer-config']);
assert.equal(setPathValue, '/tmp/subminer-config');
});

View File

@@ -231,10 +231,10 @@ export function createMainBootServices<
params.registerSecondInstanceHandlerEarly(
listener as (_event: unknown, argv: string[]) => void,
);
return params.app;
return appLifecycleApp;
}
params.app.on(event, listener);
return params.app;
return appLifecycleApp;
},
whenReady: () => params.app.whenReady(),
} as TAppLifecycleApp;

View File

@@ -48,9 +48,14 @@ test('buildDictionaryZip writes a valid stored zip without fs.writeFileSync', ()
const termEntries: CharacterDictionaryTermEntry[] = [
['アルファ', 'あるふぁ', '', '', 0, ['Alpha entry'], 0, 'name'],
];
const originalWriteFileSync = fs.writeFileSync;
const originalBufferConcat = Buffer.concat;
try {
fs.writeFileSync = ((..._args: unknown[]) => {
throw new Error('buildDictionaryZip should not call fs.writeFileSync');
}) as typeof fs.writeFileSync;
Buffer.concat = ((...args: Parameters<typeof Buffer.concat>) => {
throw new Error(`buildDictionaryZip should not Buffer.concat the full archive (${args[0].length} chunks)`);
}) as typeof Buffer.concat;
@@ -92,6 +97,7 @@ test('buildDictionaryZip writes a valid stored zip without fs.writeFileSync', ()
assert.equal(termBank[0]?.[0], 'アルファ');
assert.deepEqual(entries.get('images/alpha.bin'), Buffer.from([1, 2, 3]));
} finally {
fs.writeFileSync = originalWriteFileSync;
Buffer.concat = originalBufferConcat;
cleanupDir(tempDir);
}