fix(stats): use yomitan tokens for subtitle counts

This commit is contained in:
2026-03-17 22:33:08 -07:00
parent ecb41a490b
commit 8f39416ff5
35 changed files with 991 additions and 507 deletions

View File

@@ -18,7 +18,6 @@ const SESSION_SUMMARIES = [
totalWatchedMs: 60_000,
activeWatchedMs: 50_000,
linesSeen: 10,
wordsSeen: 100,
tokensSeen: 80,
cardsMined: 2,
lookupCount: 5,
@@ -34,11 +33,10 @@ const DAILY_ROLLUPS = [
totalSessions: 1,
totalActiveMin: 10,
totalLinesSeen: 10,
totalWordsSeen: 100,
totalTokensSeen: 80,
totalCards: 2,
cardsPerHour: 12,
wordsPerMin: 10,
tokensPerMin: 10,
lookupHitRate: 0.8,
},
];
@@ -96,7 +94,7 @@ const ANIME_LIBRARY = [
totalSessions: 3,
totalActiveMs: 180_000,
totalCards: 5,
totalWordsSeen: 300,
totalTokensSeen: 300,
episodeCount: 2,
episodesTotal: 25,
lastWatchedMs: Date.now(),
@@ -113,7 +111,7 @@ const ANIME_DETAIL = {
totalSessions: 3,
totalActiveMs: 180_000,
totalCards: 5,
totalWordsSeen: 300,
totalTokensSeen: 300,
totalLinesSeen: 50,
totalLookupCount: 20,
totalLookupHits: 15,
@@ -198,7 +196,7 @@ const ANIME_EPISODES = [
totalSessions: 1,
totalActiveMs: 90_000,
totalCards: 3,
totalWordsSeen: 150,
totalTokensSeen: 150,
lastWatchedMs: Date.now(),
},
];
@@ -349,6 +347,47 @@ describe('stats server API routes', () => {
assert.ok(Array.isArray(body));
});
it('GET /api/stats/sessions/:id/events forwards event type filters to the tracker', async () => {
  // Capture exactly what the route hands through to the tracker so the
  // query-string parsing (limit + comma-separated types) can be verified.
  let capturedSessionId = 0;
  let capturedLimit = 0;
  let capturedTypes: number[] | undefined;
  const app = createStatsApp(
    createMockTracker({
      getSessionEvents: async (sessionId: number, limit?: number, eventTypes?: number[]) => {
        capturedSessionId = sessionId;
        capturedLimit = limit ?? 0;
        capturedTypes = eventTypes;
        return [];
      },
    }),
  );
  const res = await app.request('/api/stats/sessions/7/events?limit=12&types=4,5,9');
  assert.equal(res.status, 200);
  assert.equal(capturedSessionId, 7);
  assert.equal(capturedLimit, 12);
  // "types=4,5,9" must arrive as a parsed numeric array, not a raw string.
  assert.deepEqual(capturedTypes, [4, 5, 9]);
});
it('GET /api/stats/sessions/:id/timeline requests the full session when no limit is provided', async () => {
  // Record the pass-through arguments so we can assert the route does NOT
  // inject a default limit of its own.
  let observedSessionId = 0;
  let observedLimit: number | undefined;
  const app = createStatsApp(
    createMockTracker({
      getSessionTimeline: async (sessionId: number, limit?: number) => {
        observedSessionId = sessionId;
        observedLimit = limit;
        return [];
      },
    }),
  );
  const res = await app.request('/api/stats/sessions/7/timeline');
  assert.equal(res.status, 200);
  assert.equal(observedSessionId, 7);
  // No ?limit= query param → the tracker must be asked for the entire session.
  assert.equal(observedLimit, undefined);
});
it('GET /api/stats/sessions/:id/known-words-timeline preserves line positions and counts known occurrences', async () => {
await withTempDir(async (dir) => {
const cachePath = path.join(dir, 'known-words.json');

View File

@@ -218,13 +218,13 @@ test('finalize updates lifetime summary rows from final session metrics', async
} | null;
const mediaRow = db
.prepare(
'SELECT total_sessions, total_cards, total_active_ms, total_words_seen, total_lines_seen FROM imm_lifetime_media WHERE video_id = ?',
'SELECT total_sessions, total_cards, total_active_ms, total_tokens_seen, total_lines_seen FROM imm_lifetime_media WHERE video_id = ?',
)
.get(videoId) as {
total_sessions: number;
total_cards: number;
total_active_ms: number;
total_words_seen: number;
total_tokens_seen: number;
total_lines_seen: number;
} | null;
const animeIdRow = db
@@ -675,7 +675,6 @@ test('startup finalizes stale active sessions and applies lifetime summaries', a
total_watched_ms,
active_watched_ms,
lines_seen,
words_seen,
tokens_seen,
cards_mined,
lookup_count,
@@ -691,7 +690,6 @@ test('startup finalizes stale active sessions and applies lifetime summaries', a
5000,
4000,
12,
90,
120,
2,
5,
@@ -711,7 +709,7 @@ test('startup finalizes stale active sessions and applies lifetime summaries', a
const sessionRow = restartedApi.db
.prepare(
`
SELECT ended_at_ms, status, active_watched_ms, words_seen, cards_mined
SELECT ended_at_ms, status, active_watched_ms, tokens_seen, cards_mined
FROM imm_sessions
WHERE session_id = 1
`,
@@ -720,7 +718,7 @@ test('startup finalizes stale active sessions and applies lifetime summaries', a
ended_at_ms: number | null;
status: number;
active_watched_ms: number;
words_seen: number;
tokens_seen: number;
cards_mined: number;
} | null;
const globalRow = restartedApi.db
@@ -754,7 +752,7 @@ test('startup finalizes stale active sessions and applies lifetime summaries', a
assert.ok(Number(sessionRow?.ended_at_ms ?? 0) >= sampleMs);
assert.equal(sessionRow?.status, 2);
assert.equal(sessionRow?.active_watched_ms, 4000);
assert.equal(sessionRow?.words_seen, 90);
assert.equal(sessionRow?.tokens_seen, 120);
assert.equal(sessionRow?.cards_mined, 2);
assert.ok(globalRow);
@@ -782,7 +780,18 @@ test('persists and retrieves minimum immersion tracking fields', async () => {
tracker = new Ctor({ dbPath });
tracker.handleMediaChange('/tmp/episode-3.mkv', 'Episode 3');
tracker.recordSubtitleLine('alpha beta', 0, 1.2);
tracker.recordSubtitleLine('alpha beta', 0, 1.2, [
makeMergedToken({
surface: 'alpha',
headword: 'alpha',
reading: 'alpha',
}),
makeMergedToken({
surface: 'beta',
headword: 'beta',
reading: 'beta',
}),
]);
tracker.recordCardsMined(2);
tracker.recordLookup(true);
tracker.recordPlaybackPosition(12.5);
@@ -811,14 +820,13 @@ test('persists and retrieves minimum immersion tracking fields', async () => {
} | null;
const telemetryRow = db
.prepare(
`SELECT lines_seen, words_seen, tokens_seen, cards_mined
`SELECT lines_seen, tokens_seen, cards_mined
FROM imm_session_telemetry
ORDER BY sample_ms DESC, telemetry_id DESC
LIMIT 1`,
)
.get() as {
lines_seen: number;
words_seen: number;
tokens_seen: number;
cards_mined: number;
} | null;
@@ -831,7 +839,6 @@ test('persists and retrieves minimum immersion tracking fields', async () => {
assert.ok(telemetryRow);
assert.ok(Number(telemetryRow?.lines_seen ?? 0) >= 1);
assert.ok(Number(telemetryRow?.words_seen ?? 0) >= 2);
assert.ok(Number(telemetryRow?.tokens_seen ?? 0) >= 2);
assert.ok(Number(telemetryRow?.cards_mined ?? 0) >= 2);
} finally {
@@ -1062,6 +1069,87 @@ test('recordSubtitleLine persists counted allowed tokenized vocabulary rows and
}
});
test('recordSubtitleLine counts exact Yomitan tokens for session metrics', async () => {
  const dbPath = makeDbPath();
  let tracker: ImmersionTrackerService | null = null;
  try {
    const Ctor = await loadTrackerCtor();
    tracker = new Ctor({ dbPath });
    tracker.handleMediaChange('/tmp/token-counting.mkv', 'Token Counting');
    // Four tokens total — including a duplicated surface form and a particle —
    // so the assertion proves tokensSeen counts every Yomitan token as-is
    // rather than deduplicating or filtering by part of speech.
    const tokenSpecs = [
      { surface: '猫', headword: '猫', reading: 'ねこ', partOfSpeech: PartOfSpeech.noun, pos1: '名詞' },
      { surface: '猫', headword: '猫', reading: 'ねこ', partOfSpeech: PartOfSpeech.noun, pos1: '名詞' },
      { surface: 'は', headword: 'は', reading: 'は', partOfSpeech: PartOfSpeech.particle, pos1: '助詞' },
      { surface: '知っている', headword: '知る', reading: 'しっている', partOfSpeech: PartOfSpeech.other, pos1: '動詞' },
    ];
    tracker.recordSubtitleLine(
      '猫 猫 日 日 は 知っている',
      0,
      1,
      tokenSpecs.map((spec) => makeMergedToken(spec)),
    );
    // Force the batched writes out so getSessionSummaries sees current state.
    const internals = tracker as unknown as {
      flushTelemetry: (force?: boolean) => void;
      flushNow: () => void;
    };
    internals.flushTelemetry(true);
    internals.flushNow();
    const summaries = await tracker.getSessionSummaries(10);
    assert.equal(summaries[0]?.tokensSeen, 4);
  } finally {
    tracker?.destroy();
    cleanupDbPath(dbPath);
  }
});
test('recordSubtitleLine leaves session token counts at zero when tokenization is unavailable', async () => {
  const dbPath = makeDbPath();
  let tracker: ImmersionTrackerService | null = null;
  try {
    const Ctor = await loadTrackerCtor();
    tracker = new Ctor({ dbPath });
    tracker.handleMediaChange('/tmp/no-tokenization.mkv', 'No Tokenization');
    // Passing null tokens simulates the tokenizer being unavailable; no
    // whitespace-based word count may be substituted for tokensSeen.
    tracker.recordSubtitleLine('alpha beta gamma', 0, 1.2, null);
    // Flush pending telemetry/writes before reading summaries back.
    const internals = tracker as unknown as {
      flushTelemetry: (force?: boolean) => void;
      flushNow: () => void;
    };
    internals.flushTelemetry(true);
    internals.flushNow();
    const rows = await tracker.getSessionSummaries(10);
    assert.equal(rows[0]?.tokensSeen, 0);
  } finally {
    tracker?.destroy();
    cleanupDbPath(dbPath);
  }
});
test('subtitle-line event payload omits duplicated subtitle text', async () => {
const dbPath = makeDbPath();
let tracker: ImmersionTrackerService | null = null;
@@ -1094,11 +1182,11 @@ test('subtitle-line event payload omits duplicated subtitle text', async () => {
assert.ok(row?.payloadJson);
const parsed = JSON.parse(row?.payloadJson ?? '{}') as {
event?: string;
words?: number;
tokens?: number;
text?: string;
};
assert.equal(parsed.event, 'subtitle-line');
assert.equal(typeof parsed.words, 'number');
assert.equal(typeof parsed.tokens, 'number');
assert.equal('text' in parsed, false);
} finally {
tracker?.destroy();
@@ -1548,12 +1636,11 @@ test('zero retention days disables prune checks while preserving rollups', async
total_sessions,
total_active_min,
total_lines_seen,
total_words_seen,
total_tokens_seen,
total_cards
) VALUES
(${insertedDailyRollupKeys[0]}, 1, 1, 1, 1, 1, 1, 1),
(${insertedDailyRollupKeys[1]}, 1, 1, 1, 1, 1, 1, 1)
(${insertedDailyRollupKeys[0]}, 1, 1, 1, 1, 1, 1),
(${insertedDailyRollupKeys[1]}, 1, 1, 1, 1, 1, 1)
`);
privateApi.db.exec(`
INSERT INTO imm_monthly_rollups (
@@ -1562,14 +1649,13 @@ test('zero retention days disables prune checks while preserving rollups', async
total_sessions,
total_active_min,
total_lines_seen,
total_words_seen,
total_tokens_seen,
total_cards,
CREATED_DATE,
LAST_UPDATE_DATE
) VALUES
(${insertedMonthlyRollupKeys[0]}, 1, 1, 1, 1, 1, 1, 1, ${olderMs}, ${olderMs}),
(${insertedMonthlyRollupKeys[1]}, 1, 1, 1, 1, 1, 1, 1, ${oldMs}, ${oldMs})
(${insertedMonthlyRollupKeys[0]}, 1, 1, 1, 1, 1, 1, ${olderMs}, ${olderMs}),
(${insertedMonthlyRollupKeys[1]}, 1, 1, 1, 1, 1, 1, ${oldMs}, ${oldMs})
`);
privateApi.runMaintenance();
@@ -1668,7 +1754,6 @@ test('monthly rollups are grouped by calendar month', async () => {
total_watched_ms,
active_watched_ms,
lines_seen,
words_seen,
tokens_seen,
cards_mined,
lookup_count,
@@ -1685,7 +1770,6 @@ test('monthly rollups are grouped by calendar month', async () => {
5000,
1,
2,
2,
0,
0,
0,
@@ -1725,7 +1809,6 @@ test('monthly rollups are grouped by calendar month', async () => {
total_watched_ms,
active_watched_ms,
lines_seen,
words_seen,
tokens_seen,
cards_mined,
lookup_count,
@@ -1742,7 +1825,6 @@ test('monthly rollups are grouped by calendar month', async () => {
4000,
2,
3,
3,
1,
1,
1,
@@ -1786,13 +1868,12 @@ test('flushSingle reuses cached prepared statements', async () => {
lineIndex?: number | null;
segmentStartMs?: number | null;
segmentEndMs?: number | null;
wordsDelta?: number;
tokensDelta?: number;
cardsDelta?: number;
payloadJson?: string | null;
totalWatchedMs?: number;
activeWatchedMs?: number;
linesSeen?: number;
wordsSeen?: number;
tokensSeen?: number;
cardsMined?: number;
lookupCount?: number;
@@ -1862,7 +1943,6 @@ test('flushSingle reuses cached prepared statements', async () => {
totalWatchedMs: 1000,
activeWatchedMs: 1000,
linesSeen: 1,
wordsSeen: 2,
tokensSeen: 2,
cardsMined: 0,
lookupCount: 0,
@@ -1882,7 +1962,7 @@ test('flushSingle reuses cached prepared statements', async () => {
lineIndex: 1,
segmentStartMs: 0,
segmentEndMs: 1000,
wordsDelta: 2,
tokensDelta: 2,
cardsDelta: 0,
payloadJson: '{"event":"subtitle-line"}',
});

View File

@@ -80,7 +80,6 @@ import {
} from './immersion-tracker/query';
import {
buildVideoKey,
calculateTextMetrics,
deriveCanonicalTitle,
isKanji,
isRemoteSource,
@@ -334,7 +333,7 @@ export class ImmersionTrackerService {
return getSessionSummaries(this.db, limit);
}
async getSessionTimeline(sessionId: number, limit = 200): Promise<SessionTimelineRow[]> {
async getSessionTimeline(sessionId: number, limit?: number): Promise<SessionTimelineRow[]> {
return getSessionTimeline(this.db, sessionId, limit);
}
@@ -419,8 +418,12 @@ export class ImmersionTrackerService {
return getKanjiOccurrences(this.db, kanji, limit, offset);
}
async getSessionEvents(sessionId: number, limit = 500): Promise<SessionEventRow[]> {
return getSessionEvents(this.db, sessionId, limit);
async getSessionEvents(
sessionId: number,
limit = 500,
eventTypes?: number[],
): Promise<SessionEventRow[]> {
return getSessionEvents(this.db, sessionId, limit, eventTypes);
}
async getMediaLibrary(): Promise<MediaLibraryRow[]> {
@@ -747,11 +750,10 @@ export class ImmersionTrackerService {
const nowMs = Date.now();
const nowSec = nowMs / 1000;
const metrics = calculateTextMetrics(cleaned);
const tokenCount = tokens?.length ?? 0;
this.sessionState.currentLineIndex += 1;
this.sessionState.linesSeen += 1;
this.sessionState.wordsSeen += metrics.words;
this.sessionState.tokensSeen += metrics.tokens;
this.sessionState.tokensSeen += tokenCount;
this.sessionState.pendingTelemetry = true;
const wordOccurrences = new Map<string, CountedWordOccurrence>();
@@ -821,13 +823,13 @@ export class ImmersionTrackerService {
lineIndex: this.sessionState.currentLineIndex,
segmentStartMs: secToMs(startSec),
segmentEndMs: secToMs(endSec),
wordsDelta: metrics.words,
tokensDelta: tokenCount,
cardsDelta: 0,
eventType: EVENT_SUBTITLE_LINE,
payloadJson: sanitizePayload(
{
event: 'subtitle-line',
words: metrics.words,
tokens: tokenCount,
},
this.maxPayloadBytes,
),
@@ -876,7 +878,7 @@ export class ImmersionTrackerService {
sessionId: this.sessionState.sessionId,
sampleMs: nowMs,
eventType: EVENT_SEEK_FORWARD,
wordsDelta: 0,
tokensDelta: 0,
cardsDelta: 0,
segmentStartMs: this.sessionState.lastMediaMs,
segmentEndMs: mediaMs,
@@ -896,7 +898,7 @@ export class ImmersionTrackerService {
sessionId: this.sessionState.sessionId,
sampleMs: nowMs,
eventType: EVENT_SEEK_BACKWARD,
wordsDelta: 0,
tokensDelta: 0,
cardsDelta: 0,
segmentStartMs: this.sessionState.lastMediaMs,
segmentEndMs: mediaMs,
@@ -940,7 +942,7 @@ export class ImmersionTrackerService {
sampleMs: nowMs,
eventType: EVENT_PAUSE_START,
cardsDelta: 0,
wordsDelta: 0,
tokensDelta: 0,
payloadJson: sanitizePayload({ paused: true }, this.maxPayloadBytes),
});
} else {
@@ -955,7 +957,7 @@ export class ImmersionTrackerService {
sampleMs: nowMs,
eventType: EVENT_PAUSE_END,
cardsDelta: 0,
wordsDelta: 0,
tokensDelta: 0,
payloadJson: sanitizePayload({ paused: false }, this.maxPayloadBytes),
});
}
@@ -976,7 +978,7 @@ export class ImmersionTrackerService {
sampleMs: Date.now(),
eventType: EVENT_LOOKUP,
cardsDelta: 0,
wordsDelta: 0,
tokensDelta: 0,
payloadJson: sanitizePayload(
{
hit,
@@ -996,7 +998,7 @@ export class ImmersionTrackerService {
sampleMs: Date.now(),
eventType: EVENT_YOMITAN_LOOKUP,
cardsDelta: 0,
wordsDelta: 0,
tokensDelta: 0,
payloadJson: null,
});
}
@@ -1010,7 +1012,7 @@ export class ImmersionTrackerService {
sessionId: this.sessionState.sessionId,
sampleMs: Date.now(),
eventType: EVENT_CARD_MINED,
wordsDelta: 0,
tokensDelta: 0,
cardsDelta: count,
payloadJson: sanitizePayload(
{ cardsMined: count, ...(noteIds?.length ? { noteIds } : {}) },
@@ -1029,7 +1031,7 @@ export class ImmersionTrackerService {
sampleMs: Date.now(),
eventType: EVENT_MEDIA_BUFFER,
cardsDelta: 0,
wordsDelta: 0,
tokensDelta: 0,
payloadJson: sanitizePayload(
{
buffer: true,
@@ -1062,7 +1064,6 @@ export class ImmersionTrackerService {
totalWatchedMs: this.sessionState.totalWatchedMs,
activeWatchedMs: this.sessionState.activeWatchedMs,
linesSeen: this.sessionState.linesSeen,
wordsSeen: this.sessionState.wordsSeen,
tokensSeen: this.sessionState.tokensSeen,
cardsMined: this.sessionState.cardsMined,
lookupCount: this.sessionState.lookupCount,
@@ -1191,7 +1192,6 @@ export class ImmersionTrackerService {
totalWatchedMs: 0,
activeWatchedMs: 0,
linesSeen: 0,
wordsSeen: 0,
tokensSeen: 0,
cardsMined: 0,
lookupCount: 0,

View File

@@ -32,11 +32,17 @@ import {
getVocabularyStats,
getKanjiStats,
getSessionEvents,
getSessionTimeline,
getSessionWordsByLine,
getWordOccurrences,
upsertCoverArt,
} from '../query.js';
import { SOURCE_TYPE_LOCAL, EVENT_SUBTITLE_LINE } from '../types.js';
import {
SOURCE_TYPE_LOCAL,
EVENT_CARD_MINED,
EVENT_SUBTITLE_LINE,
EVENT_YOMITAN_LOOKUP,
} from '../types.js';
function makeDbPath(): string {
const dir = fs.mkdtempSync(path.join(os.tmpdir(), 'subminer-imm-query-test-'));
@@ -99,7 +105,6 @@ test('getSessionSummaries returns sessionId and canonicalTitle', () => {
2_500,
5,
10,
10,
1,
2,
1,
@@ -124,7 +129,6 @@ test('getSessionSummaries returns sessionId and canonicalTitle', () => {
assert.equal(row.linesSeen, 5);
assert.equal(row.totalWatchedMs, 3_000);
assert.equal(row.activeWatchedMs, 2_500);
assert.equal(row.wordsSeen, 10);
assert.equal(row.tokensSeen, 10);
assert.equal(row.lookupCount, 2);
assert.equal(row.lookupHits, 1);
@@ -135,6 +139,57 @@ test('getSessionSummaries returns sessionId and canonicalTitle', () => {
}
});
test('getSessionTimeline returns the full session when no limit is provided', () => {
  const dbPath = makeDbPath();
  const db = new Database(dbPath);
  try {
    ensureSchema(db);
    const stmts = createTrackerPreparedStatements(db);
    const videoId = getOrCreateVideoRecord(db, 'local:/tmp/full-timeline-test.mkv', {
      canonicalTitle: 'Full Timeline Test',
      sourcePath: '/tmp/full-timeline-test.mkv',
      sourceUrl: null,
      sourceType: SOURCE_TYPE_LOCAL,
    });
    const startedAtMs = 2_000_000;
    const { sessionId } = startSessionRecord(db, videoId, startedAtMs);
    // Insert more samples than a typical default limit (200) so a truncating
    // implementation would fail the length assertion below.
    const totalSamples = 205;
    for (let i = 0; i < totalSamples; i += 1) {
      const sampleMs = startedAtMs + i * 500;
      stmts.telemetryInsertStmt.run(
        sessionId,
        sampleMs,
        i * 500,
        i * 450,
        i,
        i * 4,
        0,
        0,
        0,
        0,
        0,
        0,
        0,
        0,
        sampleMs,
        sampleMs,
      );
    }
    const rows = getSessionTimeline(db, sessionId);
    assert.equal(rows.length, totalSamples);
    // Rows come back newest-first: the first row carries the highest
    // linesSeen value and the oldest sample (linesSeen = 0) is last.
    assert.equal(rows[0]?.linesSeen, totalSamples - 1);
    assert.equal(rows.at(-1)?.linesSeen, 0);
  } finally {
    db.close();
    cleanupDbPath(dbPath);
  }
});
test('getDailyRollups limits by distinct days (not rows)', () => {
const dbPath = makeDbPath();
const db = new Database(dbPath);
@@ -146,15 +201,15 @@ test('getDailyRollups limits by distinct days (not rows)', () => {
`
INSERT INTO imm_daily_rollups (
rollup_day, video_id, total_sessions, total_active_min, total_lines_seen,
total_words_seen, total_tokens_seen, total_cards
) VALUES (?, ?, ?, ?, ?, ?, ?, ?)
total_tokens_seen, total_cards
) VALUES (?, ?, ?, ?, ?, ?, ?)
`,
);
insert.run(10, 1, 1, 1, 0, 0, 0, 2);
insert.run(10, 2, 1, 1, 0, 0, 0, 3);
insert.run(9, 1, 1, 1, 0, 0, 0, 1);
insert.run(8, 1, 1, 1, 0, 0, 0, 1);
insert.run(10, 1, 1, 1, 0, 0, 2);
insert.run(10, 2, 1, 1, 0, 0, 3);
insert.run(9, 1, 1, 1, 0, 0, 1);
insert.run(8, 1, 1, 1, 0, 0, 1);
const rows = getDailyRollups(db, 2);
assert.equal(rows.length, 3);
@@ -213,12 +268,11 @@ test('getTrendsDashboard returns chart-ready aggregated series', () => {
startedAtMs,
activeWatchedMs,
cardsMined,
wordsSeen,
tokensSeen,
yomitanLookupCount,
] of [
[sessionOne.sessionId, dayOneStart, 30 * 60_000, 2, 100, 120, 8],
[sessionTwo.sessionId, dayTwoStart, 45 * 60_000, 3, 120, 140, 10],
[sessionOne.sessionId, dayOneStart, 30 * 60_000, 2, 120, 8],
[sessionTwo.sessionId, dayTwoStart, 45 * 60_000, 3, 140, 10],
] as const) {
stmts.telemetryInsertStmt.run(
sessionId,
@@ -226,7 +280,6 @@ test('getTrendsDashboard returns chart-ready aggregated series', () => {
activeWatchedMs,
activeWatchedMs,
10,
wordsSeen,
tokensSeen,
cardsMined,
0,
@@ -248,7 +301,6 @@ test('getTrendsDashboard returns chart-ready aggregated series', () => {
total_watched_ms = ?,
active_watched_ms = ?,
lines_seen = ?,
words_seen = ?,
tokens_seen = ?,
cards_mined = ?,
yomitan_lookup_count = ?
@@ -259,7 +311,6 @@ test('getTrendsDashboard returns chart-ready aggregated series', () => {
activeWatchedMs,
activeWatchedMs,
10,
wordsSeen,
tokensSeen,
cardsMined,
yomitanLookupCount,
@@ -271,19 +322,19 @@ test('getTrendsDashboard returns chart-ready aggregated series', () => {
`
INSERT INTO imm_daily_rollups (
rollup_day, video_id, total_sessions, total_active_min, total_lines_seen,
total_words_seen, total_tokens_seen, total_cards
) VALUES (?, ?, ?, ?, ?, ?, ?, ?)
total_tokens_seen, total_cards
) VALUES (?, ?, ?, ?, ?, ?, ?)
`,
).run(Math.floor(dayOneStart / 86_400_000), videoId, 1, 30, 10, 100, 120, 2);
).run(Math.floor(dayOneStart / 86_400_000), videoId, 1, 30, 10, 120, 2);
db.prepare(
`
INSERT INTO imm_daily_rollups (
rollup_day, video_id, total_sessions, total_active_min, total_lines_seen,
total_words_seen, total_tokens_seen, total_cards
) VALUES (?, ?, ?, ?, ?, ?, ?, ?)
total_tokens_seen, total_cards
) VALUES (?, ?, ?, ?, ?, ?, ?)
`,
).run(Math.floor(dayTwoStart / 86_400_000), videoId, 1, 45, 10, 120, 140, 3);
).run(Math.floor(dayTwoStart / 86_400_000), videoId, 1, 45, 10, 140, 3);
db.prepare(
`
@@ -349,14 +400,14 @@ test('getQueryHints reads all-time totals from lifetime summary', () => {
`
INSERT INTO imm_daily_rollups (
rollup_day, video_id, total_sessions, total_active_min, total_lines_seen,
total_words_seen, total_tokens_seen, total_cards
) VALUES (?, ?, ?, ?, ?, ?, ?, ?)
total_tokens_seen, total_cards
) VALUES (?, ?, ?, ?, ?, ?, ?)
`,
);
insert.run(10, 1, 1, 12, 0, 0, 0, 2);
insert.run(10, 2, 1, 11, 0, 0, 0, 3);
insert.run(9, 1, 1, 10, 0, 0, 0, 1);
insert.run(10, 1, 1, 12, 0, 0, 2);
insert.run(10, 2, 1, 11, 0, 0, 3);
insert.run(9, 1, 1, 10, 0, 0, 1);
const hints = getQueryHints(db);
assert.equal(hints.totalSessions, 4);
@@ -394,7 +445,6 @@ test('getSessionSummaries with no telemetry returns zero aggregates', () => {
assert.equal(row.totalWatchedMs, 0);
assert.equal(row.activeWatchedMs, 0);
assert.equal(row.linesSeen, 0);
assert.equal(row.wordsSeen, 0);
assert.equal(row.tokensSeen, 0);
assert.equal(row.lookupCount, 0);
assert.equal(row.lookupHits, 0);
@@ -432,7 +482,6 @@ test('getSessionSummaries uses denormalized session metrics for ended sessions w
total_watched_ms = ?,
active_watched_ms = ?,
lines_seen = ?,
words_seen = ?,
tokens_seen = ?,
cards_mined = ?,
lookup_count = ?,
@@ -440,7 +489,7 @@ test('getSessionSummaries uses denormalized session metrics for ended sessions w
LAST_UPDATE_DATE = ?
WHERE session_id = ?
`,
).run(endedAtMs, 8_000, 7_000, 12, 34, 34, 5, 9, 6, endedAtMs, sessionId);
).run(endedAtMs, 8_000, 7_000, 12, 34, 5, 9, 6, endedAtMs, sessionId);
const rows = getSessionSummaries(db, 10);
const row = rows.find((r) => r.sessionId === sessionId);
@@ -448,7 +497,6 @@ test('getSessionSummaries uses denormalized session metrics for ended sessions w
assert.equal(row.totalWatchedMs, 8_000);
assert.equal(row.activeWatchedMs, 7_000);
assert.equal(row.linesSeen, 12);
assert.equal(row.wordsSeen, 34);
assert.equal(row.tokensSeen, 34);
assert.equal(row.cardsMined, 5);
assert.equal(row.lookupCount, 9);
@@ -639,15 +687,15 @@ test('getDailyRollups returns all rows for the most recent rollup days', () => {
const insertRollup = db.prepare(
`
INSERT INTO imm_daily_rollups (
rollup_day, video_id, total_sessions, total_active_min, total_lines_seen, total_words_seen,
total_tokens_seen, total_cards, cards_per_hour, words_per_min, lookup_hit_rate
) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
rollup_day, video_id, total_sessions, total_active_min, total_lines_seen,
total_tokens_seen, total_cards, cards_per_hour, tokens_per_min, lookup_hit_rate
) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
`,
);
insertRollup.run(3_000, 1, 1, 10, 20, 30, 40, 2, 0.1, 0.2, 0.3);
insertRollup.run(3_000, 2, 2, 10, 20, 30, 40, 3, 0.1, 0.2, 0.3);
insertRollup.run(2_999, 3, 1, 5, 10, 15, 20, 1, 0.1, 0.2, 0.3);
insertRollup.run(2_998, 4, 1, 5, 10, 15, 20, 1, 0.1, 0.2, 0.3);
insertRollup.run(3_000, 1, 1, 10, 20, 40, 2, 0.1, 0.2, 0.3);
insertRollup.run(3_000, 2, 2, 10, 20, 40, 3, 0.1, 0.2, 0.3);
insertRollup.run(2_999, 3, 1, 5, 10, 20, 1, 0.1, 0.2, 0.3);
insertRollup.run(2_998, 4, 1, 5, 10, 20, 1, 0.1, 0.2, 0.3);
const rows = getDailyRollups(db, 1);
assert.equal(rows.length, 2);
@@ -675,16 +723,16 @@ test('getMonthlyRollups returns all rows for the most recent rollup months', ()
const insertRollup = db.prepare(
`
INSERT INTO imm_monthly_rollups (
rollup_month, video_id, total_sessions, total_active_min, total_lines_seen, total_words_seen,
rollup_month, video_id, total_sessions, total_active_min, total_lines_seen,
total_tokens_seen, total_cards, CREATED_DATE, LAST_UPDATE_DATE
) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?)
`,
);
const nowMs = Date.now();
insertRollup.run(202602, 1, 1, 10, 20, 30, 40, 5, nowMs, nowMs);
insertRollup.run(202602, 2, 1, 10, 20, 30, 40, 6, nowMs, nowMs);
insertRollup.run(202601, 3, 1, 5, 10, 15, 20, 2, nowMs, nowMs);
insertRollup.run(202600, 4, 1, 5, 10, 15, 20, 2, nowMs, nowMs);
insertRollup.run(202602, 1, 1, 10, 20, 40, 5, nowMs, nowMs);
insertRollup.run(202602, 2, 1, 10, 20, 40, 6, nowMs, nowMs);
insertRollup.run(202601, 3, 1, 5, 10, 20, 2, nowMs, nowMs);
insertRollup.run(202600, 4, 1, 5, 10, 20, 2, nowMs, nowMs);
const rows = getMonthlyRollups(db, 1);
assert.equal(rows.length, 2);
@@ -706,9 +754,9 @@ test('getAnimeDailyRollups returns all rows for the most recent rollup days', ()
const insertRollup = db.prepare(
`
INSERT INTO imm_daily_rollups (
rollup_day, video_id, total_sessions, total_active_min, total_lines_seen, total_words_seen,
total_tokens_seen, total_cards, cards_per_hour, words_per_min, lookup_hit_rate
) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
rollup_day, video_id, total_sessions, total_active_min, total_lines_seen,
total_tokens_seen, total_cards, cards_per_hour, tokens_per_min, lookup_hit_rate
) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
`,
);
const animeId = getOrCreateAnimeRecord(db, {
@@ -738,9 +786,9 @@ test('getAnimeDailyRollups returns all rows for the most recent rollup days', ()
video2,
);
insertRollup.run(4_000, video1, 1, 10, 20, 30, 40, 2, 0.1, 0.2, 0.3);
insertRollup.run(4_000, video2, 1, 10, 20, 30, 40, 2, 0.1, 0.2, 0.3);
insertRollup.run(3_999, video1, 1, 10, 20, 30, 40, 2, 0.1, 0.2, 0.3);
insertRollup.run(4_000, video1, 1, 10, 20, 40, 2, 0.1, 0.2, 0.3);
insertRollup.run(4_000, video2, 1, 10, 20, 40, 2, 0.1, 0.2, 0.3);
insertRollup.run(3_999, video1, 1, 10, 20, 40, 2, 0.1, 0.2, 0.3);
const rows = getAnimeDailyRollups(db, animeId, 1);
assert.equal(rows.length, 2);
@@ -1112,6 +1160,78 @@ test('getSessionEvents respects limit parameter', () => {
}
});
test('getSessionEvents filters by event type before applying limit', () => {
  const dbPath = makeDbPath();
  const db = new Database(dbPath);
  try {
    ensureSchema(db);
    const stmts = createTrackerPreparedStatements(db);
    const videoId = getOrCreateVideoRecord(db, 'local:/tmp/events-type-filter.mkv', {
      canonicalTitle: 'Events Type Filter',
      sourcePath: '/tmp/events-type-filter.mkv',
      sourceUrl: null,
      sourceType: SOURCE_TYPE_LOCAL,
    });
    const startedAtMs = 7_500_000;
    const { sessionId } = startSessionRecord(db, videoId, startedAtMs);
    // Five subtitle-line events first, so an unfiltered limit=1 query would
    // return one of these instead of the card-mined event below.
    for (let lineNo = 0; lineNo < 5; lineNo += 1) {
      const ts = startedAtMs + lineNo * 1_000;
      stmts.eventInsertStmt.run(
        sessionId,
        ts,
        EVENT_SUBTITLE_LINE,
        lineNo,
        0,
        500,
        1,
        0,
        `{"line":"subtitle-${lineNo}"}`,
        ts,
        ts,
      );
    }
    // One card-mined event with a payload we can assert on.
    const minedAtMs = startedAtMs + 10_000;
    stmts.eventInsertStmt.run(
      sessionId,
      minedAtMs,
      EVENT_CARD_MINED,
      null,
      null,
      null,
      0,
      1,
      '{"cardsMined":1}',
      minedAtMs,
      minedAtMs,
    );
    // One Yomitan lookup event with no payload, as a second non-matching type.
    const lookupAtMs = startedAtMs + 11_000;
    stmts.eventInsertStmt.run(
      sessionId,
      lookupAtMs,
      EVENT_YOMITAN_LOOKUP,
      null,
      null,
      null,
      0,
      0,
      null,
      lookupAtMs,
      lookupAtMs,
    );
    // limit=1 combined with a type filter: the single returned row must be the
    // card-mined event, proving filtering happens before the limit is applied.
    const filtered = getSessionEvents(db, sessionId, 1, [EVENT_CARD_MINED]);
    assert.equal(filtered.length, 1);
    assert.equal(filtered[0]?.eventType, EVENT_CARD_MINED);
    assert.equal(filtered[0]?.payload, '{"cardsMined":1}');
  } finally {
    db.close();
    cleanupDbPath(dbPath);
  }
});
test('getSessionWordsByLine joins word occurrences through imm_words.id', () => {
const dbPath = makeDbPath();
const db = new Database(dbPath);
@@ -1251,7 +1371,6 @@ test('anime-level queries group by anime_id and preserve episode-level rows', ()
3_000,
10,
25,
25,
1,
3,
2,
@@ -1270,7 +1389,6 @@ test('anime-level queries group by anime_id and preserve episode-level rows', ()
4_000,
11,
27,
27,
2,
4,
2,
@@ -1289,7 +1407,6 @@ test('anime-level queries group by anime_id and preserve episode-level rows', ()
5_000,
12,
28,
28,
3,
5,
4,
@@ -1308,7 +1425,6 @@ test('anime-level queries group by anime_id and preserve episode-level rows', ()
3_500,
8,
20,
20,
1,
2,
1,
@@ -1329,7 +1445,6 @@ test('anime-level queries group by anime_id and preserve episode-level rows', ()
total_sessions,
total_active_ms,
total_cards,
total_words_seen,
total_lines_seen,
total_tokens_seen,
episodes_started,
@@ -1338,9 +1453,9 @@ test('anime-level queries group by anime_id and preserve episode-level rows', ()
last_watched_ms,
CREATED_DATE,
LAST_UPDATE_DATE
) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
`,
).run(lwaAnimeId, 3, 12_000, 6, 80, 33, 0, 2, 1, 1_000_000, 1_021_000, now, now);
).run(lwaAnimeId, 3, 12_000, 6, 33, 80, 2, 1, 1_000_000, 1_021_000, now, now);
db.prepare(
`
INSERT INTO imm_lifetime_anime (
@@ -1348,7 +1463,6 @@ test('anime-level queries group by anime_id and preserve episode-level rows', ()
total_sessions,
total_active_ms,
total_cards,
total_words_seen,
total_lines_seen,
total_tokens_seen,
episodes_started,
@@ -1357,9 +1471,9 @@ test('anime-level queries group by anime_id and preserve episode-level rows', ()
last_watched_ms,
CREATED_DATE,
LAST_UPDATE_DATE
) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
`,
).run(frierenAnimeId, 1, 3_500, 1, 20, 8, 0, 1, 1, 1_030_000, 1_030_000, now, now);
).run(frierenAnimeId, 1, 3_500, 1, 8, 20, 1, 1, 1_030_000, 1_030_000, now, now);
const animeLibrary = getAnimeLibrary(db);
assert.equal(animeLibrary.length, 2);
@@ -1400,7 +1514,7 @@ test('anime-level queries group by anime_id and preserve episode-level rows', ()
assert.equal(animeDetail?.totalSessions, 3);
assert.equal(animeDetail?.totalActiveMs, 12_000);
assert.equal(animeDetail?.totalCards, 6);
assert.equal(animeDetail?.totalWordsSeen, 80);
assert.equal(animeDetail?.totalTokensSeen, 80);
assert.equal(animeDetail?.totalLinesSeen, 33);
assert.equal(animeDetail?.totalLookupCount, 12);
assert.equal(animeDetail?.totalLookupHits, 8);
@@ -1416,7 +1530,7 @@ test('anime-level queries group by anime_id and preserve episode-level rows', ()
totalSessions: row.totalSessions,
totalActiveMs: row.totalActiveMs,
totalCards: row.totalCards,
totalWordsSeen: row.totalWordsSeen,
totalTokensSeen: row.totalTokensSeen,
totalYomitanLookupCount: row.totalYomitanLookupCount,
})),
[
@@ -1427,7 +1541,7 @@ test('anime-level queries group by anime_id and preserve episode-level rows', ()
totalSessions: 2,
totalActiveMs: 7_000,
totalCards: 3,
totalWordsSeen: 52,
totalTokensSeen: 52,
totalYomitanLookupCount: 0,
},
{
@@ -1437,7 +1551,7 @@ test('anime-level queries group by anime_id and preserve episode-level rows', ()
totalSessions: 1,
totalActiveMs: 5_000,
totalCards: 3,
totalWordsSeen: 28,
totalTokensSeen: 28,
totalYomitanLookupCount: 0,
},
],
@@ -1506,7 +1620,6 @@ test('anime library and detail still return lifetime rows without retained sessi
total_sessions,
total_active_ms,
total_cards,
total_words_seen,
total_lines_seen,
total_tokens_seen,
episodes_started,
@@ -1515,9 +1628,9 @@ test('anime library and detail still return lifetime rows without retained sessi
last_watched_ms,
CREATED_DATE,
LAST_UPDATE_DATE
) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
`,
).run(animeId, 12, 4_500, 9, 200, 80, 15, 2, 2, 1_000_000, now, now, now);
).run(animeId, 12, 4_500, 9, 80, 200, 2, 2, 1_000_000, now, now, now);
const library = getAnimeLibrary(db);
assert.equal(library.length, 1);
@@ -1535,7 +1648,7 @@ test('anime library and detail still return lifetime rows without retained sessi
assert.equal(detail?.totalSessions, 12);
assert.equal(detail?.totalActiveMs, 4_500);
assert.equal(detail?.totalCards, 9);
assert.equal(detail?.totalWordsSeen, 200);
assert.equal(detail?.totalTokensSeen, 200);
assert.equal(detail?.totalLinesSeen, 80);
assert.equal(detail?.episodeCount, 2);
assert.equal(detail?.totalLookupCount, 0);
@@ -1573,7 +1686,6 @@ test('media library and detail queries read lifetime totals', () => {
total_sessions,
total_active_ms,
total_cards,
total_words_seen,
total_lines_seen,
total_tokens_seen,
completed,
@@ -1581,13 +1693,13 @@ test('media library and detail queries read lifetime totals', () => {
last_watched_ms,
CREATED_DATE,
LAST_UPDATE_DATE
) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
`,
);
const now = Date.now();
const older = now - 10_000;
insertLifetime.run(mediaOne, 3, 12_000, 4, 180, 10, 20, 1, 1_000, now, now, now, now);
insertLifetime.run(mediaTwo, 1, 2_000, 2, 40, 4, 6, 0, 900, older, now, now);
insertLifetime.run(mediaOne, 3, 12_000, 4, 10, 180, 1, 1_000, now, now, now);
insertLifetime.run(mediaTwo, 1, 2_000, 2, 4, 40, 0, 900, older, now, now);
const library = getMediaLibrary(db);
assert.equal(library.length, 2);
@@ -1598,7 +1710,7 @@ test('media library and detail queries read lifetime totals', () => {
totalSessions: row.totalSessions,
totalActiveMs: row.totalActiveMs,
totalCards: row.totalCards,
totalWordsSeen: row.totalWordsSeen,
totalTokensSeen: row.totalTokensSeen,
lastWatchedMs: row.lastWatchedMs,
hasCoverArt: row.hasCoverArt,
})),
@@ -1609,7 +1721,7 @@ test('media library and detail queries read lifetime totals', () => {
totalSessions: 3,
totalActiveMs: 12_000,
totalCards: 4,
totalWordsSeen: 180,
totalTokensSeen: 180,
lastWatchedMs: now,
hasCoverArt: 0,
},
@@ -1619,7 +1731,7 @@ test('media library and detail queries read lifetime totals', () => {
totalSessions: 1,
totalActiveMs: 2_000,
totalCards: 2,
totalWordsSeen: 40,
totalTokensSeen: 40,
lastWatchedMs: older,
hasCoverArt: 0,
},
@@ -1631,7 +1743,7 @@ test('media library and detail queries read lifetime totals', () => {
assert.equal(detail.totalSessions, 3);
assert.equal(detail.totalActiveMs, 12_000);
assert.equal(detail.totalCards, 4);
assert.equal(detail.totalWordsSeen, 180);
assert.equal(detail.totalTokensSeen, 180);
assert.equal(detail.totalLinesSeen, 10);
} finally {
db.close();
@@ -1697,7 +1809,6 @@ test('cover art queries reuse a shared blob across duplicate anime art rows', ()
total_sessions,
total_active_ms,
total_cards,
total_words_seen,
total_lines_seen,
total_tokens_seen,
completed,
@@ -1705,7 +1816,7 @@ test('cover art queries reuse a shared blob across duplicate anime art rows', ()
last_watched_ms,
CREATED_DATE,
LAST_UPDATE_DATE
) VALUES (?, 1, 1000, 0, 0, 0, 0, 0, ?, ?, ?, ?)
) VALUES (?, 1, 1000, 0, 0, 0, 0, ?, ?, ?, ?)
`,
).run(videoOne, now, now, now, now);
db.prepare(
@@ -1715,7 +1826,6 @@ test('cover art queries reuse a shared blob across duplicate anime art rows', ()
total_sessions,
total_active_ms,
total_cards,
total_words_seen,
total_lines_seen,
total_tokens_seen,
completed,
@@ -1723,7 +1833,7 @@ test('cover art queries reuse a shared blob across duplicate anime art rows', ()
last_watched_ms,
CREATED_DATE,
LAST_UPDATE_DATE
) VALUES (?, 1, 1000, 0, 0, 0, 0, 0, ?, ?, ?, ?)
) VALUES (?, 1, 1000, 0, 0, 0, 0, ?, ?, ?, ?)
`,
).run(videoTwo, now, now - 1, now, now);
@@ -1823,20 +1933,20 @@ test('anime/media detail and episode queries use ended-session metrics when tele
db.prepare(
`
INSERT INTO imm_lifetime_anime (
anime_id, total_sessions, total_active_ms, total_cards, total_words_seen, total_lines_seen,
anime_id, total_sessions, total_active_ms, total_cards, total_lines_seen,
total_tokens_seen, episodes_started, episodes_completed, first_watched_ms, last_watched_ms,
CREATED_DATE, LAST_UPDATE_DATE
) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
`,
).run(animeId, 3, 12_000, 6, 60, 24, 60, 2, 2, 1_000_000, 1_020_000, now, now);
).run(animeId, 3, 12_000, 6, 24, 60, 2, 2, 1_000_000, 1_020_000, now, now);
db.prepare(
`
INSERT INTO imm_lifetime_media (
video_id, total_sessions, total_active_ms, total_cards, total_words_seen, total_lines_seen,
video_id, total_sessions, total_active_ms, total_cards, total_lines_seen,
total_tokens_seen, completed, first_watched_ms, last_watched_ms, CREATED_DATE, LAST_UPDATE_DATE
) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
`,
).run(episodeOne, 2, 7_000, 3, 30, 12, 30, 1, 1_000_000, 1_010_000, now, now);
).run(episodeOne, 2, 7_000, 3, 12, 30, 1, 1_000_000, 1_010_000, now, now);
const s1 = startSessionRecord(db, episodeOne, 1_000_000).sessionId;
const s2 = startSessionRecord(db, episodeOne, 1_010_000).sessionId;
@@ -1849,7 +1959,7 @@ test('anime/media detail and episode queries use ended-session metrics when tele
status = 2,
active_watched_ms = ?,
cards_mined = ?,
words_seen = ?,
tokens_seen = ?,
lookup_count = ?,
lookup_hits = ?,
LAST_UPDATE_DATE = ?
@@ -1872,7 +1982,7 @@ test('anime/media detail and episode queries use ended-session metrics when tele
totalSessions: row.totalSessions,
totalActiveMs: row.totalActiveMs,
totalCards: row.totalCards,
totalWordsSeen: row.totalWordsSeen,
totalTokensSeen: row.totalTokensSeen,
})),
[
{
@@ -1880,14 +1990,14 @@ test('anime/media detail and episode queries use ended-session metrics when tele
totalSessions: 2,
totalActiveMs: 7_000,
totalCards: 3,
totalWordsSeen: 30,
totalTokensSeen: 30,
},
{
videoId: episodeTwo,
totalSessions: 1,
totalActiveMs: 5_000,
totalCards: 3,
totalWordsSeen: 30,
totalTokensSeen: 30,
},
],
);
@@ -1897,7 +2007,7 @@ test('anime/media detail and episode queries use ended-session metrics when tele
assert.equal(mediaDetail?.totalSessions, 2);
assert.equal(mediaDetail?.totalActiveMs, 7_000);
assert.equal(mediaDetail?.totalCards, 3);
assert.equal(mediaDetail?.totalWordsSeen, 30);
assert.equal(mediaDetail?.totalTokensSeen, 30);
assert.equal(mediaDetail?.totalLookupCount, 9);
assert.equal(mediaDetail?.totalLookupHits, 7);
assert.equal(mediaDetail?.totalYomitanLookupCount, 0);

View File

@@ -7,7 +7,6 @@ interface TelemetryRow {
cards_mined: number | null;
lines_seen: number | null;
tokens_seen: number | null;
words_seen: number | null;
}
interface VideoRow {
@@ -46,7 +45,6 @@ interface RetainedSessionRow {
totalWatchedMs: number;
activeWatchedMs: number;
linesSeen: number;
wordsSeen: number;
tokensSeen: number;
cardsMined: number;
lookupCount: number;
@@ -150,7 +148,6 @@ function toRebuildSessionState(row: RetainedSessionRow): SessionState {
totalWatchedMs: Math.max(0, row.totalWatchedMs),
activeWatchedMs: Math.max(0, row.activeWatchedMs),
linesSeen: Math.max(0, row.linesSeen),
wordsSeen: Math.max(0, row.wordsSeen),
tokensSeen: Math.max(0, row.tokensSeen),
cardsMined: Math.max(0, row.cardsMined),
lookupCount: Math.max(0, row.lookupCount),
@@ -176,7 +173,6 @@ function getRetainedStaleActiveSessions(db: DatabaseSync): RetainedSessionRow[]
COALESCE(t.total_watched_ms, s.total_watched_ms, 0) AS totalWatchedMs,
COALESCE(t.active_watched_ms, s.active_watched_ms, 0) AS activeWatchedMs,
COALESCE(t.lines_seen, s.lines_seen, 0) AS linesSeen,
COALESCE(t.words_seen, s.words_seen, 0) AS wordsSeen,
COALESCE(t.tokens_seen, s.tokens_seen, 0) AS tokensSeen,
COALESCE(t.cards_mined, s.cards_mined, 0) AS cardsMined,
COALESCE(t.lookup_count, s.lookup_count, 0) AS lookupCount,
@@ -209,7 +205,6 @@ function upsertLifetimeMedia(
nowMs: number,
activeMs: number,
cardsMined: number,
wordsSeen: number,
linesSeen: number,
tokensSeen: number,
completed: number,
@@ -223,7 +218,6 @@ function upsertLifetimeMedia(
total_sessions,
total_active_ms,
total_cards,
total_words_seen,
total_lines_seen,
total_tokens_seen,
completed,
@@ -232,12 +226,11 @@ function upsertLifetimeMedia(
CREATED_DATE,
LAST_UPDATE_DATE
)
VALUES (?, 1, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
VALUES (?, 1, ?, ?, ?, ?, ?, ?, ?, ?, ?)
ON CONFLICT(video_id) DO UPDATE SET
total_sessions = total_sessions + 1,
total_active_ms = total_active_ms + excluded.total_active_ms,
total_cards = total_cards + excluded.total_cards,
total_words_seen = total_words_seen + excluded.total_words_seen,
total_lines_seen = total_lines_seen + excluded.total_lines_seen,
total_tokens_seen = total_tokens_seen + excluded.total_tokens_seen,
completed = MAX(completed, excluded.completed),
@@ -259,7 +252,6 @@ function upsertLifetimeMedia(
videoId,
activeMs,
cardsMined,
wordsSeen,
linesSeen,
tokensSeen,
completed,
@@ -276,7 +268,6 @@ function upsertLifetimeAnime(
nowMs: number,
activeMs: number,
cardsMined: number,
wordsSeen: number,
linesSeen: number,
tokensSeen: number,
episodesStartedDelta: number,
@@ -291,7 +282,6 @@ function upsertLifetimeAnime(
total_sessions,
total_active_ms,
total_cards,
total_words_seen,
total_lines_seen,
total_tokens_seen,
episodes_started,
@@ -301,12 +291,11 @@ function upsertLifetimeAnime(
CREATED_DATE,
LAST_UPDATE_DATE
)
VALUES (?, 1, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
VALUES (?, 1, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
ON CONFLICT(anime_id) DO UPDATE SET
total_sessions = total_sessions + 1,
total_active_ms = total_active_ms + excluded.total_active_ms,
total_cards = total_cards + excluded.total_cards,
total_words_seen = total_words_seen + excluded.total_words_seen,
total_lines_seen = total_lines_seen + excluded.total_lines_seen,
total_tokens_seen = total_tokens_seen + excluded.total_tokens_seen,
episodes_started = episodes_started + excluded.episodes_started,
@@ -329,7 +318,6 @@ function upsertLifetimeAnime(
animeId,
activeMs,
cardsMined,
wordsSeen,
linesSeen,
tokensSeen,
episodesStartedDelta,
@@ -372,7 +360,6 @@ export function applySessionLifetimeSummary(
SELECT
active_watched_ms,
cards_mined,
words_seen,
lines_seen,
tokens_seen
FROM imm_session_telemetry
@@ -407,9 +394,6 @@ export function applySessionLifetimeSummary(
const cardsMined = telemetry
? asPositiveNumber(telemetry.cards_mined, session.cardsMined)
: session.cardsMined;
const wordsSeen = telemetry
? asPositiveNumber(telemetry.words_seen, session.wordsSeen)
: session.wordsSeen;
const linesSeen = telemetry
? asPositiveNumber(telemetry.lines_seen, session.linesSeen)
: session.linesSeen;
@@ -470,7 +454,6 @@ export function applySessionLifetimeSummary(
nowMs,
activeMs,
cardsMined,
wordsSeen,
linesSeen,
tokensSeen,
watched > 0 ? 1 : 0,
@@ -485,7 +468,6 @@ export function applySessionLifetimeSummary(
nowMs,
activeMs,
cardsMined,
wordsSeen,
linesSeen,
tokensSeen,
isFirstSessionForVideoRun ? 1 : 0,
@@ -509,7 +491,6 @@ export function rebuildLifetimeSummaries(db: DatabaseSync): LifetimeRebuildSumma
total_watched_ms AS totalWatchedMs,
active_watched_ms AS activeWatchedMs,
lines_seen AS linesSeen,
words_seen AS wordsSeen,
tokens_seen AS tokensSeen,
cards_mined AS cardsMined,
lookup_count AS lookupCount,

View File

@@ -109,16 +109,16 @@ test('raw retention keeps rollups and rollup retention prunes them separately',
1, ${nowMs - 90 * 86_400_000}, 0, 0, ${nowMs}, ${nowMs}
);
INSERT INTO imm_daily_rollups (
rollup_day, video_id, total_sessions, total_active_min, total_lines_seen, total_words_seen,
rollup_day, video_id, total_sessions, total_active_min, total_lines_seen,
total_tokens_seen, total_cards
) VALUES (
${oldDay}, 1, 1, 10, 1, 1, 1, 1
${oldDay}, 1, 1, 10, 1, 1, 1
);
INSERT INTO imm_monthly_rollups (
rollup_month, video_id, total_sessions, total_active_min, total_lines_seen, total_words_seen,
rollup_month, video_id, total_sessions, total_active_min, total_lines_seen,
total_tokens_seen, total_cards, CREATED_DATE, LAST_UPDATE_DATE
) VALUES (
${oldMonth}, 1, 1, 10, 1, 1, 1, 1, ${nowMs}, ${nowMs}
${oldMonth}, 1, 1, 10, 1, 1, 1, ${nowMs}, ${nowMs}
);
`);

View File

@@ -125,8 +125,8 @@ function upsertDailyRollupsForGroups(
const upsertStmt = db.prepare(`
INSERT INTO imm_daily_rollups (
rollup_day, video_id, total_sessions, total_active_min, total_lines_seen,
total_words_seen, total_tokens_seen, total_cards, cards_per_hour,
words_per_min, lookup_hit_rate, CREATED_DATE, LAST_UPDATE_DATE
total_tokens_seen, total_cards, cards_per_hour,
tokens_per_min, lookup_hit_rate, CREATED_DATE, LAST_UPDATE_DATE
)
SELECT
CAST(julianday(s.started_at_ms / 1000, 'unixepoch', 'localtime') - 2440587.5 AS INTEGER) AS rollup_day,
@@ -134,7 +134,6 @@ function upsertDailyRollupsForGroups(
COUNT(DISTINCT s.session_id) AS total_sessions,
COALESCE(SUM(sm.max_active_ms), 0) / 60000.0 AS total_active_min,
COALESCE(SUM(sm.max_lines), 0) AS total_lines_seen,
COALESCE(SUM(sm.max_words), 0) AS total_words_seen,
COALESCE(SUM(sm.max_tokens), 0) AS total_tokens_seen,
COALESCE(SUM(sm.max_cards), 0) AS total_cards,
CASE
@@ -144,9 +143,9 @@ function upsertDailyRollupsForGroups(
END AS cards_per_hour,
CASE
WHEN COALESCE(SUM(sm.max_active_ms), 0) > 0
THEN COALESCE(SUM(sm.max_words), 0) / (COALESCE(SUM(sm.max_active_ms), 0) / 60000.0)
THEN COALESCE(SUM(sm.max_tokens), 0) / (COALESCE(SUM(sm.max_active_ms), 0) / 60000.0)
ELSE NULL
END AS words_per_min,
END AS tokens_per_min,
CASE
WHEN COALESCE(SUM(sm.max_lookups), 0) > 0
THEN CAST(COALESCE(SUM(sm.max_hits), 0) AS REAL) / CAST(SUM(sm.max_lookups) AS REAL)
@@ -160,7 +159,6 @@ function upsertDailyRollupsForGroups(
t.session_id,
MAX(t.active_watched_ms) AS max_active_ms,
MAX(t.lines_seen) AS max_lines,
MAX(t.words_seen) AS max_words,
MAX(t.tokens_seen) AS max_tokens,
MAX(t.cards_mined) AS max_cards,
MAX(t.lookup_count) AS max_lookups,
@@ -174,11 +172,10 @@ function upsertDailyRollupsForGroups(
total_sessions = excluded.total_sessions,
total_active_min = excluded.total_active_min,
total_lines_seen = excluded.total_lines_seen,
total_words_seen = excluded.total_words_seen,
total_tokens_seen = excluded.total_tokens_seen,
total_cards = excluded.total_cards,
cards_per_hour = excluded.cards_per_hour,
words_per_min = excluded.words_per_min,
tokens_per_min = excluded.tokens_per_min,
lookup_hit_rate = excluded.lookup_hit_rate,
CREATED_DATE = COALESCE(imm_daily_rollups.CREATED_DATE, excluded.CREATED_DATE),
LAST_UPDATE_DATE = excluded.LAST_UPDATE_DATE
@@ -201,7 +198,7 @@ function upsertMonthlyRollupsForGroups(
const upsertStmt = db.prepare(`
INSERT INTO imm_monthly_rollups (
rollup_month, video_id, total_sessions, total_active_min, total_lines_seen,
total_words_seen, total_tokens_seen, total_cards, CREATED_DATE, LAST_UPDATE_DATE
total_tokens_seen, total_cards, CREATED_DATE, LAST_UPDATE_DATE
)
SELECT
CAST(strftime('%Y%m', s.started_at_ms / 1000, 'unixepoch', 'localtime') AS INTEGER) AS rollup_month,
@@ -209,7 +206,6 @@ function upsertMonthlyRollupsForGroups(
COUNT(DISTINCT s.session_id) AS total_sessions,
COALESCE(SUM(sm.max_active_ms), 0) / 60000.0 AS total_active_min,
COALESCE(SUM(sm.max_lines), 0) AS total_lines_seen,
COALESCE(SUM(sm.max_words), 0) AS total_words_seen,
COALESCE(SUM(sm.max_tokens), 0) AS total_tokens_seen,
COALESCE(SUM(sm.max_cards), 0) AS total_cards,
? AS CREATED_DATE,
@@ -220,7 +216,6 @@ function upsertMonthlyRollupsForGroups(
t.session_id,
MAX(t.active_watched_ms) AS max_active_ms,
MAX(t.lines_seen) AS max_lines,
MAX(t.words_seen) AS max_words,
MAX(t.tokens_seen) AS max_tokens,
MAX(t.cards_mined) AS max_cards
FROM imm_session_telemetry t
@@ -232,7 +227,6 @@ function upsertMonthlyRollupsForGroups(
total_sessions = excluded.total_sessions,
total_active_min = excluded.total_active_min,
total_lines_seen = excluded.total_lines_seen,
total_words_seen = excluded.total_words_seen,
total_tokens_seen = excluded.total_tokens_seen,
total_cards = excluded.total_cards,
CREATED_DATE = COALESCE(imm_monthly_rollups.CREATED_DATE, excluded.CREATED_DATE),

View File

@@ -77,7 +77,6 @@ const ACTIVE_SESSION_METRICS_CTE = `
MAX(t.total_watched_ms) AS totalWatchedMs,
MAX(t.active_watched_ms) AS activeWatchedMs,
MAX(t.lines_seen) AS linesSeen,
MAX(t.words_seen) AS wordsSeen,
MAX(t.tokens_seen) AS tokensSeen,
MAX(t.cards_mined) AS cardsMined,
MAX(t.lookup_count) AS lookupCount,
@@ -353,7 +352,6 @@ export function getSessionSummaries(db: DatabaseSync, limit = 50): SessionSummar
COALESCE(asm.totalWatchedMs, s.total_watched_ms, 0) AS totalWatchedMs,
COALESCE(asm.activeWatchedMs, s.active_watched_ms, 0) AS activeWatchedMs,
COALESCE(asm.linesSeen, s.lines_seen, 0) AS linesSeen,
COALESCE(asm.wordsSeen, s.words_seen, 0) AS wordsSeen,
COALESCE(asm.tokensSeen, s.tokens_seen, 0) AS tokensSeen,
COALESCE(asm.cardsMined, s.cards_mined, 0) AS cardsMined,
COALESCE(asm.lookupCount, s.lookup_count, 0) AS lookupCount,
@@ -372,15 +370,30 @@ export function getSessionSummaries(db: DatabaseSync, limit = 50): SessionSummar
export function getSessionTimeline(
db: DatabaseSync,
sessionId: number,
limit = 200,
limit?: number,
): SessionTimelineRow[] {
if (limit === undefined) {
const prepared = db.prepare(`
SELECT
sample_ms AS sampleMs,
total_watched_ms AS totalWatchedMs,
active_watched_ms AS activeWatchedMs,
lines_seen AS linesSeen,
tokens_seen AS tokensSeen,
cards_mined AS cardsMined
FROM imm_session_telemetry
WHERE session_id = ?
ORDER BY sample_ms DESC, telemetry_id DESC
`);
return prepared.all(sessionId) as unknown as SessionTimelineRow[];
}
const prepared = db.prepare(`
SELECT
sample_ms AS sampleMs,
total_watched_ms AS totalWatchedMs,
active_watched_ms AS activeWatchedMs,
lines_seen AS linesSeen,
words_seen AS wordsSeen,
tokens_seen AS tokensSeen,
cards_mined AS cardsMined
FROM imm_session_telemetry
@@ -609,11 +622,10 @@ export function getDailyRollups(db: DatabaseSync, limit = 60): ImmersionSessionR
r.total_sessions AS totalSessions,
r.total_active_min AS totalActiveMin,
r.total_lines_seen AS totalLinesSeen,
r.total_words_seen AS totalWordsSeen,
r.total_tokens_seen AS totalTokensSeen,
r.total_cards AS totalCards,
r.cards_per_hour AS cardsPerHour,
r.words_per_min AS wordsPerMin,
r.tokens_per_min AS tokensPerMin,
r.lookup_hit_rate AS lookupHitRate
FROM imm_daily_rollups r
WHERE r.rollup_day IN (SELECT rollup_day FROM recent_days)
@@ -637,11 +649,10 @@ export function getMonthlyRollups(db: DatabaseSync, limit = 24): ImmersionSessio
total_sessions AS totalSessions,
total_active_min AS totalActiveMin,
total_lines_seen AS totalLinesSeen,
total_words_seen AS totalWordsSeen,
total_tokens_seen AS totalTokensSeen,
total_cards AS totalCards,
0 AS cardsPerHour,
0 AS wordsPerMin,
0 AS tokensPerMin,
0 AS lookupHitRate
FROM imm_monthly_rollups
WHERE rollup_month IN (SELECT rollup_month FROM recent_months)
@@ -670,7 +681,6 @@ interface TrendSessionMetricRow {
canonicalTitle: string | null;
animeTitle: string | null;
activeWatchedMs: number;
wordsSeen: number;
tokensSeen: number;
cardsMined: number;
yomitanLookupCount: number;
@@ -760,10 +770,8 @@ function makeTrendLabel(value: number): string {
});
}
function getTrendSessionWordCount(
session: Pick<TrendSessionMetricRow, 'wordsSeen' | 'tokensSeen'>,
): number {
return session.tokensSeen > 0 ? session.tokensSeen : session.wordsSeen;
function getTrendSessionWordCount(session: Pick<TrendSessionMetricRow, 'tokensSeen'>): number {
return session.tokensSeen;
}
function resolveTrendAnimeTitle(value: {
@@ -796,7 +804,7 @@ function buildAggregatedTrendRows(rollups: ImmersionSessionRollupRow[]) {
};
existing.activeMin += Math.round(rollup.totalActiveMin);
existing.cards += rollup.totalCards;
existing.words += rollup.totalWordsSeen;
existing.words += rollup.totalTokensSeen;
existing.sessions += rollup.totalSessions;
byKey.set(rollup.rollupDayOrMonth, existing);
}
@@ -1087,7 +1095,6 @@ function getTrendSessionMetrics(
v.canonical_title AS canonicalTitle,
a.canonical_title AS animeTitle,
COALESCE(asm.activeWatchedMs, s.active_watched_ms, 0) AS activeWatchedMs,
COALESCE(asm.wordsSeen, s.words_seen, 0) AS wordsSeen,
COALESCE(asm.tokensSeen, s.tokens_seen, 0) AS tokensSeen,
COALESCE(asm.cardsMined, s.cards_mined, 0) AS cardsMined,
COALESCE(asm.yomitanLookupCount, s.yomitan_lookup_count, 0) AS yomitanLookupCount
@@ -1167,7 +1174,7 @@ export function getTrendsDashboard(
words: buildPerAnimeFromDailyRollups(
dailyRollups,
titlesByVideoId,
(rollup) => rollup.totalWordsSeen,
(rollup) => rollup.totalTokensSeen,
),
lookups: buildPerAnimeFromSessions(
sessions,
@@ -1595,12 +1602,25 @@ export function getSessionEvents(
db: DatabaseSync,
sessionId: number,
limit = 500,
eventTypes?: number[],
): SessionEventRow[] {
if (!eventTypes || eventTypes.length === 0) {
const stmt = db.prepare(`
SELECT event_type AS eventType, ts_ms AS tsMs, payload_json AS payload
FROM imm_session_events WHERE session_id = ? ORDER BY ts_ms ASC LIMIT ?
`);
return stmt.all(sessionId, limit) as SessionEventRow[];
}
const placeholders = eventTypes.map(() => '?').join(', ');
const stmt = db.prepare(`
SELECT event_type AS eventType, ts_ms AS tsMs, payload_json AS payload
FROM imm_session_events WHERE session_id = ? ORDER BY ts_ms ASC LIMIT ?
FROM imm_session_events
WHERE session_id = ? AND event_type IN (${placeholders})
ORDER BY ts_ms ASC
LIMIT ?
`);
return stmt.all(sessionId, limit) as SessionEventRow[];
return stmt.all(sessionId, ...eventTypes, limit) as SessionEventRow[];
}
export function getAnimeLibrary(db: DatabaseSync): AnimeLibraryRow[] {
@@ -1614,7 +1634,7 @@ export function getAnimeLibrary(db: DatabaseSync): AnimeLibraryRow[] {
COALESCE(lm.total_sessions, 0) AS totalSessions,
COALESCE(lm.total_active_ms, 0) AS totalActiveMs,
COALESCE(lm.total_cards, 0) AS totalCards,
COALESCE(lm.total_words_seen, 0) AS totalWordsSeen,
COALESCE(lm.total_tokens_seen, 0) AS totalTokensSeen,
COUNT(DISTINCT v.video_id) AS episodeCount,
a.episodes_total AS episodesTotal,
COALESCE(lm.last_watched_ms, 0) AS lastWatchedMs
@@ -1644,7 +1664,7 @@ export function getAnimeDetail(db: DatabaseSync, animeId: number): AnimeDetailRo
COALESCE(lm.total_sessions, 0) AS totalSessions,
COALESCE(lm.total_active_ms, 0) AS totalActiveMs,
COALESCE(lm.total_cards, 0) AS totalCards,
COALESCE(lm.total_words_seen, 0) AS totalWordsSeen,
COALESCE(lm.total_tokens_seen, 0) AS totalTokensSeen,
COALESCE(lm.total_lines_seen, 0) AS totalLinesSeen,
COALESCE(SUM(COALESCE(asm.lookupCount, s.lookup_count, 0)), 0) AS totalLookupCount,
COALESCE(SUM(COALESCE(asm.lookupHits, s.lookup_hits, 0)), 0) AS totalLookupHits,
@@ -1699,7 +1719,7 @@ export function getAnimeEpisodes(db: DatabaseSync, animeId: number): AnimeEpisod
COUNT(DISTINCT s.session_id) AS totalSessions,
COALESCE(SUM(COALESCE(asm.activeWatchedMs, s.active_watched_ms, 0)), 0) AS totalActiveMs,
COALESCE(SUM(COALESCE(asm.cardsMined, s.cards_mined, 0)), 0) AS totalCards,
COALESCE(SUM(COALESCE(asm.wordsSeen, s.words_seen, 0)), 0) AS totalWordsSeen,
COALESCE(SUM(COALESCE(asm.tokensSeen, s.tokens_seen, 0)), 0) AS totalTokensSeen,
COALESCE(SUM(COALESCE(asm.yomitanLookupCount, s.yomitan_lookup_count, 0)), 0) AS totalYomitanLookupCount,
MAX(s.started_at_ms) AS lastWatchedMs
FROM imm_videos v
@@ -1728,7 +1748,7 @@ export function getMediaLibrary(db: DatabaseSync): MediaLibraryRow[] {
COALESCE(lm.total_sessions, 0) AS totalSessions,
COALESCE(lm.total_active_ms, 0) AS totalActiveMs,
COALESCE(lm.total_cards, 0) AS totalCards,
COALESCE(lm.total_words_seen, 0) AS totalWordsSeen,
COALESCE(lm.total_tokens_seen, 0) AS totalTokensSeen,
COALESCE(lm.last_watched_ms, 0) AS lastWatchedMs,
CASE
WHEN ma.cover_blob_hash IS NOT NULL OR ma.cover_blob IS NOT NULL THEN 1
@@ -1754,7 +1774,7 @@ export function getMediaDetail(db: DatabaseSync, videoId: number): MediaDetailRo
COALESCE(lm.total_sessions, 0) AS totalSessions,
COALESCE(lm.total_active_ms, 0) AS totalActiveMs,
COALESCE(lm.total_cards, 0) AS totalCards,
COALESCE(lm.total_words_seen, 0) AS totalWordsSeen,
COALESCE(lm.total_tokens_seen, 0) AS totalTokensSeen,
COALESCE(lm.total_lines_seen, 0) AS totalLinesSeen,
COALESCE(SUM(COALESCE(asm.lookupCount, s.lookup_count, 0)), 0) AS totalLookupCount,
COALESCE(SUM(COALESCE(asm.lookupHits, s.lookup_hits, 0)), 0) AS totalLookupHits,
@@ -1788,7 +1808,6 @@ export function getMediaSessions(
COALESCE(asm.totalWatchedMs, s.total_watched_ms, 0) AS totalWatchedMs,
COALESCE(asm.activeWatchedMs, s.active_watched_ms, 0) AS activeWatchedMs,
COALESCE(asm.linesSeen, s.lines_seen, 0) AS linesSeen,
COALESCE(asm.wordsSeen, s.words_seen, 0) AS wordsSeen,
COALESCE(asm.tokensSeen, s.tokens_seen, 0) AS tokensSeen,
COALESCE(asm.cardsMined, s.cards_mined, 0) AS cardsMined,
COALESCE(asm.lookupCount, s.lookup_count, 0) AS lookupCount,
@@ -1826,11 +1845,10 @@ export function getMediaDailyRollups(
total_sessions AS totalSessions,
total_active_min AS totalActiveMin,
total_lines_seen AS totalLinesSeen,
total_words_seen AS totalWordsSeen,
total_tokens_seen AS totalTokensSeen,
total_cards AS totalCards,
cards_per_hour AS cardsPerHour,
words_per_min AS wordsPerMin,
tokens_per_min AS tokensPerMin,
lookup_hit_rate AS lookupHitRate
FROM imm_daily_rollups
WHERE video_id = ?
@@ -1859,9 +1877,9 @@ export function getAnimeDailyRollups(
)
SELECT r.rollup_day AS rollupDayOrMonth, r.video_id AS videoId,
r.total_sessions AS totalSessions, r.total_active_min AS totalActiveMin,
r.total_lines_seen AS totalLinesSeen, r.total_words_seen AS totalWordsSeen,
r.total_lines_seen AS totalLinesSeen,
r.total_tokens_seen AS totalTokensSeen, r.total_cards AS totalCards,
r.cards_per_hour AS cardsPerHour, r.words_per_min AS wordsPerMin,
r.cards_per_hour AS cardsPerHour, r.tokens_per_min AS tokensPerMin,
r.lookup_hit_rate AS lookupHitRate
FROM imm_daily_rollups r
JOIN imm_videos v ON v.video_id = r.video_id
@@ -2153,7 +2171,6 @@ export function getEpisodeSessions(db: DatabaseSync, videoId: number): SessionSu
COALESCE(asm.totalWatchedMs, s.total_watched_ms, 0) AS totalWatchedMs,
COALESCE(asm.activeWatchedMs, s.active_watched_ms, 0) AS activeWatchedMs,
COALESCE(asm.linesSeen, s.lines_seen, 0) AS linesSeen,
COALESCE(asm.wordsSeen, s.words_seen, 0) AS wordsSeen,
COALESCE(asm.tokensSeen, s.tokens_seen, 0) AS tokensSeen,
COALESCE(asm.cardsMined, s.cards_mined, 0) AS cardsMined,
COALESCE(asm.lookupCount, s.lookup_count, 0) AS lookupCount,

View File

@@ -15,7 +15,6 @@ export function createInitialSessionState(
totalWatchedMs: 0,
activeWatchedMs: 0,
linesSeen: 0,
wordsSeen: 0,
tokensSeen: 0,
cardsMined: 0,
lookupCount: 0,
@@ -52,16 +51,6 @@ export function sanitizePayload(payload: Record<string, unknown>, maxPayloadByte
return json.length <= maxPayloadBytes ? json : JSON.stringify({ truncated: true });
}
export function calculateTextMetrics(value: string): {
words: number;
tokens: number;
} {
const words = value.split(/\s+/).filter(Boolean).length;
const cjkCount = value.match(/[\u3040-\u30ff\u4e00-\u9fff]/g)?.length ?? 0;
const tokens = Math.max(words, cjkCount);
return { words, tokens };
}
export function secToMs(seconds: number): number {
const coerced = Number(seconds);
if (!Number.isFinite(coerced)) return 0;

View File

@@ -42,7 +42,6 @@ export function finalizeSessionRecord(
total_watched_ms = ?,
active_watched_ms = ?,
lines_seen = ?,
words_seen = ?,
tokens_seen = ?,
cards_mined = ?,
lookup_count = ?,
@@ -62,7 +61,6 @@ export function finalizeSessionRecord(
sessionState.totalWatchedMs,
sessionState.activeWatchedMs,
sessionState.linesSeen,
sessionState.wordsSeen,
sessionState.tokensSeen,
sessionState.cardsMined,
sessionState.lookupCount,

View File

@@ -763,7 +763,6 @@ test('executeQueuedWrite inserts event and telemetry rows', () => {
totalWatchedMs: 1_000,
activeWatchedMs: 900,
linesSeen: 3,
wordsSeen: 6,
tokensSeen: 6,
cardsMined: 1,
lookupCount: 2,
@@ -786,7 +785,7 @@ test('executeQueuedWrite inserts event and telemetry rows', () => {
lineIndex: 1,
segmentStartMs: 0,
segmentEndMs: 800,
wordsDelta: 2,
tokensDelta: 2,
cardsDelta: 0,
payloadJson: '{"event":"subtitle-line"}',
},

View File

@@ -290,7 +290,6 @@ function ensureLifetimeSummaryTables(db: DatabaseSync): void {
total_sessions INTEGER NOT NULL DEFAULT 0,
total_active_ms INTEGER NOT NULL DEFAULT 0,
total_cards INTEGER NOT NULL DEFAULT 0,
total_words_seen INTEGER NOT NULL DEFAULT 0,
total_lines_seen INTEGER NOT NULL DEFAULT 0,
total_tokens_seen INTEGER NOT NULL DEFAULT 0,
episodes_started INTEGER NOT NULL DEFAULT 0,
@@ -309,7 +308,6 @@ function ensureLifetimeSummaryTables(db: DatabaseSync): void {
total_sessions INTEGER NOT NULL DEFAULT 0,
total_active_ms INTEGER NOT NULL DEFAULT 0,
total_cards INTEGER NOT NULL DEFAULT 0,
total_words_seen INTEGER NOT NULL DEFAULT 0,
total_lines_seen INTEGER NOT NULL DEFAULT 0,
total_tokens_seen INTEGER NOT NULL DEFAULT 0,
completed INTEGER NOT NULL DEFAULT 0,
@@ -574,7 +572,6 @@ export function ensureSchema(db: DatabaseSync): void {
total_watched_ms INTEGER NOT NULL DEFAULT 0,
active_watched_ms INTEGER NOT NULL DEFAULT 0,
lines_seen INTEGER NOT NULL DEFAULT 0,
words_seen INTEGER NOT NULL DEFAULT 0,
tokens_seen INTEGER NOT NULL DEFAULT 0,
cards_mined INTEGER NOT NULL DEFAULT 0,
lookup_count INTEGER NOT NULL DEFAULT 0,
@@ -598,7 +595,6 @@ export function ensureSchema(db: DatabaseSync): void {
total_watched_ms INTEGER NOT NULL DEFAULT 0,
active_watched_ms INTEGER NOT NULL DEFAULT 0,
lines_seen INTEGER NOT NULL DEFAULT 0,
words_seen INTEGER NOT NULL DEFAULT 0,
tokens_seen INTEGER NOT NULL DEFAULT 0,
cards_mined INTEGER NOT NULL DEFAULT 0,
lookup_count INTEGER NOT NULL DEFAULT 0,
@@ -623,7 +619,7 @@ export function ensureSchema(db: DatabaseSync): void {
line_index INTEGER,
segment_start_ms INTEGER,
segment_end_ms INTEGER,
words_delta INTEGER NOT NULL DEFAULT 0,
tokens_delta INTEGER NOT NULL DEFAULT 0,
cards_delta INTEGER NOT NULL DEFAULT 0,
payload_json TEXT,
CREATED_DATE INTEGER,
@@ -638,11 +634,10 @@ export function ensureSchema(db: DatabaseSync): void {
total_sessions INTEGER NOT NULL DEFAULT 0,
total_active_min REAL NOT NULL DEFAULT 0,
total_lines_seen INTEGER NOT NULL DEFAULT 0,
total_words_seen INTEGER NOT NULL DEFAULT 0,
total_tokens_seen INTEGER NOT NULL DEFAULT 0,
total_cards INTEGER NOT NULL DEFAULT 0,
cards_per_hour REAL,
words_per_min REAL,
tokens_per_min REAL,
lookup_hit_rate REAL,
CREATED_DATE INTEGER,
LAST_UPDATE_DATE INTEGER,
@@ -656,7 +651,6 @@ export function ensureSchema(db: DatabaseSync): void {
total_sessions INTEGER NOT NULL DEFAULT 0,
total_active_min REAL NOT NULL DEFAULT 0,
total_lines_seen INTEGER NOT NULL DEFAULT 0,
total_words_seen INTEGER NOT NULL DEFAULT 0,
total_tokens_seen INTEGER NOT NULL DEFAULT 0,
total_cards INTEGER NOT NULL DEFAULT 0,
CREATED_DATE INTEGER,
@@ -895,7 +889,6 @@ export function ensureSchema(db: DatabaseSync): void {
addColumnIfMissing(db, 'imm_sessions', 'total_watched_ms', 'INTEGER NOT NULL DEFAULT 0');
addColumnIfMissing(db, 'imm_sessions', 'active_watched_ms', 'INTEGER NOT NULL DEFAULT 0');
addColumnIfMissing(db, 'imm_sessions', 'lines_seen', 'INTEGER NOT NULL DEFAULT 0');
addColumnIfMissing(db, 'imm_sessions', 'words_seen', 'INTEGER NOT NULL DEFAULT 0');
addColumnIfMissing(db, 'imm_sessions', 'tokens_seen', 'INTEGER NOT NULL DEFAULT 0');
addColumnIfMissing(db, 'imm_sessions', 'cards_mined', 'INTEGER NOT NULL DEFAULT 0');
addColumnIfMissing(db, 'imm_sessions', 'lookup_count', 'INTEGER NOT NULL DEFAULT 0');
@@ -930,13 +923,6 @@ export function ensureSchema(db: DatabaseSync): void {
ORDER BY t.sample_ms DESC, t.telemetry_id DESC
LIMIT 1
), lines_seen),
words_seen = COALESCE((
SELECT t.words_seen
FROM imm_session_telemetry t
WHERE t.session_id = imm_sessions.session_id
ORDER BY t.sample_ms DESC, t.telemetry_id DESC
LIMIT 1
), words_seen),
tokens_seen = COALESCE((
SELECT t.tokens_seen
FROM imm_session_telemetry t
@@ -1163,17 +1149,17 @@ export function createTrackerPreparedStatements(db: DatabaseSync): TrackerPrepar
telemetryInsertStmt: db.prepare(`
INSERT INTO imm_session_telemetry (
session_id, sample_ms, total_watched_ms, active_watched_ms,
lines_seen, words_seen, tokens_seen, cards_mined, lookup_count,
lines_seen, tokens_seen, cards_mined, lookup_count,
lookup_hits, yomitan_lookup_count, pause_count, pause_ms, seek_forward_count,
seek_backward_count, media_buffer_events, CREATED_DATE, LAST_UPDATE_DATE
) VALUES (
?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?
?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?
)
`),
eventInsertStmt: db.prepare(`
INSERT INTO imm_session_events (
session_id, ts_ms, event_type, line_index, segment_start_ms, segment_end_ms,
words_delta, cards_delta, payload_json, CREATED_DATE, LAST_UPDATE_DATE
tokens_delta, cards_delta, payload_json, CREATED_DATE, LAST_UPDATE_DATE
) VALUES (
?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?
)
@@ -1310,7 +1296,6 @@ export function executeQueuedWrite(write: QueuedWrite, stmts: TrackerPreparedSta
write.totalWatchedMs!,
write.activeWatchedMs!,
write.linesSeen!,
write.wordsSeen!,
write.tokensSeen!,
write.cardsMined!,
write.lookupCount!,
@@ -1381,7 +1366,7 @@ export function executeQueuedWrite(write: QueuedWrite, stmts: TrackerPreparedSta
write.lineIndex ?? null,
write.segmentStartMs ?? null,
write.segmentEndMs ?? null,
write.wordsDelta ?? 0,
write.tokensDelta ?? 0,
write.cardsDelta ?? 0,
write.payloadJson ?? null,
Date.now(),

View File

@@ -56,7 +56,6 @@ export interface TelemetryAccumulator {
totalWatchedMs: number;
activeWatchedMs: number;
linesSeen: number;
wordsSeen: number;
tokensSeen: number;
cardsMined: number;
lookupCount: number;
@@ -89,7 +88,6 @@ interface QueuedTelemetryWrite {
totalWatchedMs?: number;
activeWatchedMs?: number;
linesSeen?: number;
wordsSeen?: number;
tokensSeen?: number;
cardsMined?: number;
lookupCount?: number;
@@ -104,7 +102,7 @@ interface QueuedTelemetryWrite {
lineIndex?: number | null;
segmentStartMs?: number | null;
segmentEndMs?: number | null;
wordsDelta?: number;
tokensDelta?: number;
cardsDelta?: number;
payloadJson?: string | null;
}
@@ -117,7 +115,7 @@ interface QueuedEventWrite {
lineIndex?: number | null;
segmentStartMs?: number | null;
segmentEndMs?: number | null;
wordsDelta?: number;
tokensDelta?: number;
cardsDelta?: number;
payloadJson?: string | null;
}
@@ -231,7 +229,6 @@ export interface SessionSummaryQueryRow {
totalWatchedMs: number;
activeWatchedMs: number;
linesSeen: number;
wordsSeen: number;
tokensSeen: number;
cardsMined: number;
lookupCount: number;
@@ -255,7 +252,6 @@ export interface LifetimeAnimeRow {
totalSessions: number;
totalActiveMs: number;
totalCards: number;
totalWordsSeen: number;
totalLinesSeen: number;
totalTokensSeen: number;
episodesStarted: number;
@@ -269,7 +265,6 @@ export interface LifetimeMediaRow {
totalSessions: number;
totalActiveMs: number;
totalCards: number;
totalWordsSeen: number;
totalLinesSeen: number;
totalTokensSeen: number;
completed: number;
@@ -374,7 +369,6 @@ export interface SessionTimelineRow {
totalWatchedMs: number;
activeWatchedMs: number;
linesSeen: number;
wordsSeen: number;
tokensSeen: number;
cardsMined: number;
}
@@ -385,11 +379,10 @@ export interface ImmersionSessionRollupRow {
totalSessions: number;
totalActiveMin: number;
totalLinesSeen: number;
totalWordsSeen: number;
totalTokensSeen: number;
totalCards: number;
cardsPerHour: number | null;
wordsPerMin: number | null;
tokensPerMin: number | null;
lookupHitRate: number | null;
}
@@ -421,7 +414,7 @@ export interface MediaLibraryRow {
totalSessions: number;
totalActiveMs: number;
totalCards: number;
totalWordsSeen: number;
totalTokensSeen: number;
lastWatchedMs: number;
hasCoverArt: number;
}
@@ -432,7 +425,7 @@ export interface MediaDetailRow {
totalSessions: number;
totalActiveMs: number;
totalCards: number;
totalWordsSeen: number;
totalTokensSeen: number;
totalLinesSeen: number;
totalLookupCount: number;
totalLookupHits: number;
@@ -446,7 +439,7 @@ export interface AnimeLibraryRow {
totalSessions: number;
totalActiveMs: number;
totalCards: number;
totalWordsSeen: number;
totalTokensSeen: number;
episodeCount: number;
episodesTotal: number | null;
lastWatchedMs: number;
@@ -463,7 +456,7 @@ export interface AnimeDetailRow {
totalSessions: number;
totalActiveMs: number;
totalCards: number;
totalWordsSeen: number;
totalTokensSeen: number;
totalLinesSeen: number;
totalLookupCount: number;
totalLookupHits: number;
@@ -491,7 +484,7 @@ export interface AnimeEpisodeRow {
totalSessions: number;
totalActiveMs: number;
totalCards: number;
totalWordsSeen: number;
totalTokensSeen: number;
totalYomitanLookupCount: number;
lastWatchedMs: number;
}