Fix immersion tracker SQLite timestamp truncation

- Bind epoch ms values as text to avoid libsql numeric truncation
- Update retention, lifetime, and query tests for string timestamps
- Add backlog ticket for the SQLite timestamp bug
This commit is contained in:
2026-03-30 19:52:18 -07:00
parent c8e42b3973
commit 6118c46192
17 changed files with 1015 additions and 486 deletions

View File

@@ -0,0 +1,29 @@
---
id: TASK-261
title: Fix immersion tracker SQLite timestamp truncation
status: In Progress
assignee: []
created_date: '2026-03-31 01:45'
labels:
- immersion-tracker
- sqlite
- bug
dependencies: []
references:
- src/core/services/immersion-tracker
priority: medium
ordinal: 1200
---
## Description
<!-- SECTION:DESCRIPTION:BEGIN -->
Current-epoch millisecond values are being truncated by the libsql driver when bound as numeric parameters, which corrupts session, telemetry, lifetime, and rollup timestamps.
<!-- SECTION:DESCRIPTION:END -->
## Acceptance Criteria
<!-- AC:BEGIN -->
- [ ] #1 Current-epoch millisecond timestamps persist correctly in session, telemetry, lifetime, and rollup tables
- [ ] #2 Startup backfill and destroy/finalize flows keep retained sessions and lifetime summaries consistent
- [ ] #3 Regression tests cover the destroyed-session, startup backfill, and distinct-day/distinct-video lifetime semantics
<!-- AC:END -->

View File

@@ -5,6 +5,7 @@ import os from 'node:os';
import path from 'node:path';
import { toMonthKey } from './immersion-tracker/maintenance';
import { enqueueWrite } from './immersion-tracker/queue';
import { toDbTimestamp } from './immersion-tracker/query-shared';
import { Database, type DatabaseSync } from './immersion-tracker/sqlite';
import { nowMs as trackerNowMs } from './immersion-tracker/time';
import {
@@ -185,7 +186,7 @@ test('destroy finalizes active session and persists final telemetry', async () =
const db = new Database(dbPath);
const sessionRow = db.prepare('SELECT ended_at_ms FROM imm_sessions LIMIT 1').get() as {
ended_at_ms: number | null;
ended_at_ms: string | number | null;
} | null;
const telemetryCountRow = db
.prepare('SELECT COUNT(*) AS total FROM imm_session_telemetry')
@@ -193,7 +194,7 @@ test('destroy finalizes active session and persists final telemetry', async () =
db.close();
assert.ok(sessionRow);
assert.ok(Number(sessionRow?.ended_at_ms ?? 0) > 0);
assert.notEqual(sessionRow?.ended_at_ms, null);
assert.ok(Number(telemetryCountRow.total) >= 2);
} finally {
tracker?.destroy();
@@ -504,7 +505,7 @@ test('rebuildLifetimeSummaries backfills retained ended sessions and resets stal
episodes_started: number;
episodes_completed: number;
anime_completed: number;
last_rebuilt_ms: number | null;
last_rebuilt_ms: string | number | null;
} | null;
const appliedSessions = rebuildApi.db
.prepare('SELECT COUNT(*) AS total FROM imm_lifetime_applied_sessions')
@@ -518,7 +519,7 @@ test('rebuildLifetimeSummaries backfills retained ended sessions and resets stal
assert.equal(globalRow?.episodes_started, 2);
assert.equal(globalRow?.episodes_completed, 2);
assert.equal(globalRow?.anime_completed, 1);
assert.equal(globalRow?.last_rebuilt_ms, rebuild.rebuiltAtMs);
assert.equal(globalRow?.last_rebuilt_ms, toDbTimestamp(rebuild.rebuiltAtMs));
assert.equal(appliedSessions?.total, 2);
} finally {
tracker?.destroy();
@@ -629,7 +630,8 @@ test('startup finalizes stale active sessions and applies lifetime summaries', a
const startedAtMs = trackerNowMs() - 10_000;
const sampleMs = startedAtMs + 5_000;
db.exec(`
db.prepare(
`
INSERT INTO imm_anime (
anime_id,
canonical_title,
@@ -637,15 +639,12 @@ test('startup finalizes stale active sessions and applies lifetime summaries', a
episodes_total,
CREATED_DATE,
LAST_UPDATE_DATE
) VALUES (
1,
'KonoSuba',
'konosuba',
10,
${startedAtMs},
${startedAtMs}
);
) VALUES (?, ?, ?, ?, ?, ?)
`,
).run(1, 'KonoSuba', 'konosuba', 10, toDbTimestamp(startedAtMs), toDbTimestamp(startedAtMs));
db.prepare(
`
INSERT INTO imm_videos (
video_id,
video_key,
@@ -656,7 +655,9 @@ test('startup finalizes stale active sessions and applies lifetime summaries', a
duration_ms,
CREATED_DATE,
LAST_UPDATE_DATE
) VALUES (
) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?)
`,
).run(
1,
'local:/tmp/konosuba-s02e05.mkv',
'KonoSuba S02E05',
@@ -664,10 +665,12 @@ test('startup finalizes stale active sessions and applies lifetime summaries', a
1,
1,
0,
${startedAtMs},
${startedAtMs}
toDbTimestamp(startedAtMs),
toDbTimestamp(startedAtMs),
);
db.prepare(
`
INSERT INTO imm_sessions (
session_id,
session_uuid,
@@ -677,17 +680,21 @@ test('startup finalizes stale active sessions and applies lifetime summaries', a
ended_media_ms,
CREATED_DATE,
LAST_UPDATE_DATE
) VALUES (
) VALUES (?, ?, ?, ?, ?, ?, ?, ?)
`,
).run(
1,
'11111111-1111-1111-1111-111111111111',
1,
${startedAtMs},
toDbTimestamp(startedAtMs),
1,
321000,
${startedAtMs},
${sampleMs}
toDbTimestamp(startedAtMs),
toDbTimestamp(sampleMs),
);
db.prepare(
`
INSERT INTO imm_session_telemetry (
session_id,
sample_ms,
@@ -703,23 +710,9 @@ test('startup finalizes stale active sessions and applies lifetime summaries', a
seek_forward_count,
seek_backward_count,
media_buffer_events
) VALUES (
1,
${sampleMs},
5000,
4000,
12,
120,
2,
5,
3,
1,
250,
1,
0,
0
);
`);
) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
`,
).run(1, toDbTimestamp(sampleMs), 5000, 4000, 12, 120, 2, 5, 3, 1, 250, 1, 0, 0);
tracker.destroy();
tracker = new Ctor({ dbPath });
@@ -734,7 +727,7 @@ test('startup finalizes stale active sessions and applies lifetime summaries', a
`,
)
.get() as {
ended_at_ms: number | null;
ended_at_ms: string | number | null;
status: number;
ended_media_ms: number | null;
active_watched_ms: number;
@@ -769,7 +762,7 @@ test('startup finalizes stale active sessions and applies lifetime summaries', a
.get() as { total: number } | null;
assert.ok(sessionRow);
assert.ok(Number(sessionRow?.ended_at_ms ?? 0) >= sampleMs);
assert.equal(sessionRow?.ended_at_ms, toDbTimestamp(sampleMs));
assert.equal(sessionRow?.status, 2);
assert.equal(sessionRow?.ended_media_ms, 321_000);
assert.equal(sessionRow?.active_watched_ms, 4000);

View File

@@ -309,6 +309,9 @@ export class ImmersionTrackerService {
private readonly eventsRetentionMs: number;
private readonly telemetryRetentionMs: number;
private readonly sessionsRetentionMs: number;
private readonly eventsRetentionDays: number | null;
private readonly telemetryRetentionDays: number | null;
private readonly sessionsRetentionDays: number | null;
private readonly dailyRollupRetentionMs: number;
private readonly monthlyRollupRetentionMs: number;
private readonly vacuumIntervalMs: number;
@@ -365,46 +368,54 @@ export class ImmersionTrackerService {
);
const retention = policy.retention ?? {};
const daysToRetentionMs = (
const daysToRetentionWindow = (
value: number | undefined,
fallbackMs: number,
fallbackDays: number,
maxDays: number,
): number => {
const fallbackDays = Math.floor(fallbackMs / 86_400_000);
): { ms: number; days: number | null } => {
const resolvedDays = resolveBoundedInt(value, fallbackDays, 0, maxDays);
return resolvedDays === 0 ? Number.POSITIVE_INFINITY : resolvedDays * 86_400_000;
return {
ms: resolvedDays === 0 ? Number.POSITIVE_INFINITY : resolvedDays * 86_400_000,
days: resolvedDays === 0 ? null : resolvedDays,
};
};
this.eventsRetentionMs = daysToRetentionMs(
const eventsRetention = daysToRetentionWindow(
retention.eventsDays,
DEFAULT_EVENTS_RETENTION_MS,
7,
3650,
);
this.telemetryRetentionMs = daysToRetentionMs(
const telemetryRetention = daysToRetentionWindow(
retention.telemetryDays,
DEFAULT_TELEMETRY_RETENTION_MS,
30,
3650,
);
this.sessionsRetentionMs = daysToRetentionMs(
const sessionsRetention = daysToRetentionWindow(
retention.sessionsDays,
DEFAULT_SESSIONS_RETENTION_MS,
30,
3650,
);
this.dailyRollupRetentionMs = daysToRetentionMs(
this.eventsRetentionMs = eventsRetention.ms;
this.eventsRetentionDays = eventsRetention.days;
this.telemetryRetentionMs = telemetryRetention.ms;
this.telemetryRetentionDays = telemetryRetention.days;
this.sessionsRetentionMs = sessionsRetention.ms;
this.sessionsRetentionDays = sessionsRetention.days;
this.dailyRollupRetentionMs = daysToRetentionWindow(
retention.dailyRollupsDays,
DEFAULT_DAILY_ROLLUP_RETENTION_MS,
365,
36500,
);
this.monthlyRollupRetentionMs = daysToRetentionMs(
).ms;
this.monthlyRollupRetentionMs = daysToRetentionWindow(
retention.monthlyRollupsDays,
DEFAULT_MONTHLY_ROLLUP_RETENTION_MS,
5 * 365,
36500,
);
this.vacuumIntervalMs = daysToRetentionMs(
).ms;
this.vacuumIntervalMs = daysToRetentionWindow(
retention.vacuumIntervalDays,
DEFAULT_VACUUM_INTERVAL_MS,
7,
3650,
);
).ms;
this.db = new Database(this.dbPath);
applyPragmas(this.db);
ensureSchema(this.db);
@@ -1604,6 +1615,9 @@ export class ImmersionTrackerService {
eventsRetentionMs: this.eventsRetentionMs,
telemetryRetentionMs: this.telemetryRetentionMs,
sessionsRetentionMs: this.sessionsRetentionMs,
eventsRetentionDays: this.eventsRetentionDays ?? undefined,
telemetryRetentionDays: this.telemetryRetentionDays ?? undefined,
sessionsRetentionDays: this.sessionsRetentionDays ?? undefined,
});
}
if (

View File

@@ -44,6 +44,7 @@ import {
EVENT_SUBTITLE_LINE,
EVENT_YOMITAN_LOOKUP,
} from '../types.js';
import { toDbTimestamp } from '../query-shared.js';
function makeDbPath(): string {
const dir = fs.mkdtempSync(path.join(os.tmpdir(), 'subminer-imm-query-test-'));
@@ -81,29 +82,13 @@ function cleanupDbPath(dbPath: string): void {
}
}
function withMockDate<T>(fixedDate: Date, run: (realDate: typeof Date) => T): T {
const realDate = Date;
const fixedDateMs = fixedDate.getTime();
class MockDate extends Date {
constructor(...args: any[]) {
if (args.length === 0) {
super(fixedDateMs);
} else {
super(...(args as [any?, any?, any?, any?, any?, any?, any?]));
}
}
static override now(): number {
return fixedDateMs;
}
}
globalThis.Date = MockDate as DateConstructor;
function withMockNowMs<T>(fixedDateMs: string | number, run: () => T): T {
const previousNowMs = globalThis.__subminerTestNowMs;
globalThis.__subminerTestNowMs = fixedDateMs;
try {
return run(realDate);
return run();
} finally {
globalThis.Date = realDate;
globalThis.__subminerTestNowMs = previousNowMs;
}
}
@@ -613,7 +598,7 @@ test('getTrendsDashboard returns chart-ready aggregated series', () => {
] as const) {
stmts.telemetryInsertStmt.run(
sessionId,
startedAtMs + 60_000,
`${startedAtMs + 60_000}`,
activeWatchedMs,
activeWatchedMs,
10,
@@ -626,8 +611,8 @@ test('getTrendsDashboard returns chart-ready aggregated series', () => {
0,
0,
0,
startedAtMs + 60_000,
startedAtMs + 60_000,
`${startedAtMs + 60_000}`,
`${startedAtMs + 60_000}`,
);
db.prepare(
@@ -644,7 +629,7 @@ test('getTrendsDashboard returns chart-ready aggregated series', () => {
WHERE session_id = ?
`,
).run(
startedAtMs + activeWatchedMs,
`${startedAtMs + activeWatchedMs}`,
activeWatchedMs,
activeWatchedMs,
10,
@@ -687,8 +672,8 @@ test('getTrendsDashboard returns chart-ready aggregated series', () => {
'名詞',
null,
null,
Math.floor(dayOneStart / 1000),
Math.floor(dayTwoStart / 1000),
String(Math.floor(dayOneStart / 1000)),
String(Math.floor(dayTwoStart / 1000)),
);
const dashboard = getTrendsDashboard(db, 'all', 'day');
@@ -743,18 +728,50 @@ test('getTrendsDashboard keeps local-midnight session buckets separate', () => {
parseMetadataJson: null,
});
const beforeMidnight = new Date(2026, 2, 1, 23, 30).getTime();
const afterMidnight = new Date(2026, 2, 2, 0, 30).getTime();
const firstSessionId = startSessionRecord(db, videoId, beforeMidnight).sessionId;
const secondSessionId = startSessionRecord(db, videoId, afterMidnight).sessionId;
const beforeMidnight = '1772436600000';
const afterMidnight = '1772440200000';
const firstSessionId = 1;
const secondSessionId = 2;
const insertSession = db.prepare(
`
INSERT INTO imm_sessions (
session_id,
session_uuid,
video_id,
started_at_ms,
status,
CREATED_DATE,
LAST_UPDATE_DATE
) VALUES (?, ?, ?, ?, ?, ?, ?)
`,
);
insertSession.run(
firstSessionId,
'11111111-1111-1111-1111-111111111111',
videoId,
beforeMidnight,
1,
beforeMidnight,
beforeMidnight,
);
insertSession.run(
secondSessionId,
'22222222-2222-2222-2222-222222222222',
videoId,
afterMidnight,
1,
afterMidnight,
afterMidnight,
);
for (const [sessionId, startedAtMs, tokensSeen, lookupCount] of [
[firstSessionId, beforeMidnight, 100, 4],
[secondSessionId, afterMidnight, 120, 6],
] as const) {
const endedAtMs = (BigInt(startedAtMs) + 60_000n).toString();
stmts.telemetryInsertStmt.run(
sessionId,
startedAtMs + 60_000,
endedAtMs,
60_000,
60_000,
1,
@@ -767,8 +784,8 @@ test('getTrendsDashboard keeps local-midnight session buckets separate', () => {
0,
0,
0,
startedAtMs + 60_000,
startedAtMs + 60_000,
endedAtMs,
endedAtMs,
);
db.prepare(
`
@@ -787,7 +804,7 @@ test('getTrendsDashboard keeps local-midnight session buckets separate', () => {
WHERE session_id = ?
`,
).run(
startedAtMs + 60_000,
endedAtMs,
60_000,
60_000,
1,
@@ -795,7 +812,7 @@ test('getTrendsDashboard keeps local-midnight session buckets separate', () => {
lookupCount,
lookupCount,
lookupCount,
startedAtMs + 60_000,
endedAtMs,
sessionId,
);
}
@@ -816,7 +833,7 @@ test('getTrendsDashboard keeps local-midnight session buckets separate', () => {
test('getTrendsDashboard month grouping spans every touched calendar month and keeps progress monthly', () => {
const dbPath = makeDbPath();
const db = new Database(dbPath);
withMockDate(new Date(2026, 2, 1, 12, 0, 0), (RealDate) => {
withMockNowMs('1772395200000', () => {
try {
ensureSchema(db);
const stmts = createTrackerPreparedStatements(db);
@@ -862,18 +879,50 @@ test('getTrendsDashboard month grouping spans every touched calendar month and k
parseMetadataJson: null,
});
const febStartedAtMs = new RealDate(2026, 1, 15, 20, 0, 0).getTime();
const marStartedAtMs = new RealDate(2026, 2, 1, 9, 0, 0).getTime();
const febSessionId = startSessionRecord(db, febVideoId, febStartedAtMs).sessionId;
const marSessionId = startSessionRecord(db, marVideoId, marStartedAtMs).sessionId;
const febStartedAtMs = '1771214400000';
const marStartedAtMs = '1772384400000';
const febSessionId = 1;
const marSessionId = 2;
const insertSession = db.prepare(
`
INSERT INTO imm_sessions (
session_id,
session_uuid,
video_id,
started_at_ms,
status,
CREATED_DATE,
LAST_UPDATE_DATE
) VALUES (?, ?, ?, ?, ?, ?, ?)
`,
);
insertSession.run(
febSessionId,
'33333333-3333-3333-3333-333333333333',
febVideoId,
febStartedAtMs,
1,
febStartedAtMs,
febStartedAtMs,
);
insertSession.run(
marSessionId,
'44444444-4444-4444-4444-444444444444',
marVideoId,
marStartedAtMs,
1,
marStartedAtMs,
marStartedAtMs,
);
for (const [sessionId, startedAtMs, tokensSeen, cardsMined, yomitanLookupCount] of [
[febSessionId, febStartedAtMs, 100, 2, 3],
[marSessionId, marStartedAtMs, 120, 4, 5],
] as const) {
const endedAtMs = (BigInt(startedAtMs) + 60_000n).toString();
stmts.telemetryInsertStmt.run(
sessionId,
startedAtMs + 60_000,
endedAtMs,
30 * 60_000,
30 * 60_000,
4,
@@ -886,8 +935,8 @@ test('getTrendsDashboard month grouping spans every touched calendar month and k
0,
0,
0,
startedAtMs + 60_000,
startedAtMs + 60_000,
endedAtMs,
endedAtMs,
);
db.prepare(
`
@@ -907,7 +956,7 @@ test('getTrendsDashboard month grouping spans every touched calendar month and k
WHERE session_id = ?
`,
).run(
startedAtMs + 60_000,
endedAtMs,
30 * 60_000,
30 * 60_000,
4,
@@ -916,7 +965,7 @@ test('getTrendsDashboard month grouping spans every touched calendar month and k
yomitanLookupCount,
yomitanLookupCount,
yomitanLookupCount,
startedAtMs + 60_000,
endedAtMs,
sessionId,
);
}
@@ -937,10 +986,8 @@ test('getTrendsDashboard month grouping spans every touched calendar month and k
) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?)
`,
);
const febEpochDay = Math.floor(febStartedAtMs / 86_400_000);
const marEpochDay = Math.floor(marStartedAtMs / 86_400_000);
insertDailyRollup.run(febEpochDay, febVideoId, 1, 30, 4, 100, 2, febStartedAtMs, febStartedAtMs);
insertDailyRollup.run(marEpochDay, marVideoId, 1, 30, 4, 120, 4, marStartedAtMs, marStartedAtMs);
insertDailyRollup.run(20500, febVideoId, 1, 30, 4, 100, 2, febStartedAtMs, febStartedAtMs);
insertDailyRollup.run(20513, marVideoId, 1, 30, 4, 120, 4, marStartedAtMs, marStartedAtMs);
insertMonthlyRollup.run(202602, febVideoId, 1, 30, 4, 100, 2, febStartedAtMs, febStartedAtMs);
insertMonthlyRollup.run(202603, marVideoId, 1, 30, 4, 120, 4, marStartedAtMs, marStartedAtMs);
@@ -958,8 +1005,8 @@ test('getTrendsDashboard month grouping spans every touched calendar month and k
'名詞',
'',
'',
Math.floor(febStartedAtMs / 1000),
Math.floor(febStartedAtMs / 1000),
(BigInt(febStartedAtMs) / 1000n).toString(),
(BigInt(febStartedAtMs) / 1000n).toString(),
1,
);
db.prepare(
@@ -976,8 +1023,8 @@ test('getTrendsDashboard month grouping spans every touched calendar month and k
'名詞',
'',
'',
Math.floor(marStartedAtMs / 1000),
Math.floor(marStartedAtMs / 1000),
(BigInt(marStartedAtMs) / 1000n).toString(),
(BigInt(marStartedAtMs) / 1000n).toString(),
1,
);
@@ -1077,7 +1124,7 @@ test('getQueryHints computes weekly new-word cutoff from calendar midnights', ()
const dbPath = makeDbPath();
const db = new Database(dbPath);
withMockDate(new Date(2026, 2, 15, 12, 0, 0), (RealDate) => {
withMockNowMs('1773601200000', () => {
try {
ensureSchema(db);
@@ -1088,12 +1135,8 @@ test('getQueryHints computes weekly new-word cutoff from calendar midnights', ()
) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
`,
);
const justBeforeWeekBoundary = Math.floor(
new RealDate(2026, 2, 7, 23, 30, 0).getTime() / 1000,
);
const justAfterWeekBoundary = Math.floor(
new RealDate(2026, 2, 8, 0, 30, 0).getTime() / 1000,
);
const justBeforeWeekBoundary = 1_772_955_000;
const justAfterWeekBoundary = 1_772_958_600;
insertWord.run(
'境界前',
'境界前',
@@ -1102,8 +1145,8 @@ test('getQueryHints computes weekly new-word cutoff from calendar midnights', ()
'名詞',
'',
'',
justBeforeWeekBoundary,
justBeforeWeekBoundary,
String(justBeforeWeekBoundary),
String(justBeforeWeekBoundary),
1,
);
insertWord.run(
@@ -1114,8 +1157,8 @@ test('getQueryHints computes weekly new-word cutoff from calendar midnights', ()
'名詞',
'',
'',
justAfterWeekBoundary,
justAfterWeekBoundary,
String(justAfterWeekBoundary),
String(justAfterWeekBoundary),
1,
);
@@ -1134,10 +1177,8 @@ test('getQueryHints counts new words by distinct headword first-seen time', () =
try {
ensureSchema(db);
const now = new Date();
const todayStartSec =
new Date(now.getFullYear(), now.getMonth(), now.getDate()).getTime() / 1000;
withMockNowMs('1773601200000', () => {
const todayStartSec = 1_773_558_000;
const oneHourAgo = todayStartSec + 3_600;
const twoDaysAgo = todayStartSec - 2 * 86_400;
@@ -1147,25 +1188,59 @@ test('getQueryHints counts new words by distinct headword first-seen time', () =
headword, word, reading, part_of_speech, pos1, pos2, pos3, first_seen, last_seen, frequency
) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
`,
).run('知る', '知った', 'しった', 'verb', '動詞', '', '', oneHourAgo, oneHourAgo, 1);
).run(
'知る',
'知った',
'しった',
'verb',
'動詞',
'',
'',
String(oneHourAgo),
String(oneHourAgo),
1,
);
db.prepare(
`
INSERT INTO imm_words (
headword, word, reading, part_of_speech, pos1, pos2, pos3, first_seen, last_seen, frequency
) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
`,
).run('知る', '知っている', 'しっている', 'verb', '動詞', '', '', oneHourAgo, oneHourAgo, 1);
).run(
'知る',
'知っている',
'しっている',
'verb',
'動詞',
'',
'',
String(oneHourAgo),
String(oneHourAgo),
1,
);
db.prepare(
`
INSERT INTO imm_words (
headword, word, reading, part_of_speech, pos1, pos2, pos3, first_seen, last_seen, frequency
) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
`,
).run('猫', '猫', 'ねこ', 'noun', '名詞', '', '', twoDaysAgo, twoDaysAgo, 1);
).run(
'猫',
'猫',
'ねこ',
'noun',
'名詞',
'',
'',
String(twoDaysAgo),
String(twoDaysAgo),
1,
);
const hints = getQueryHints(db);
assert.equal(hints.newWordsToday, 1);
assert.equal(hints.newWordsThisWeek, 2);
});
} finally {
db.close();
cleanupDbPath(dbPath);
@@ -2020,7 +2095,7 @@ test('getSessionWordsByLine joins word occurrences through imm_words.id', () =>
try {
ensureSchema(db);
const stmts = createTrackerPreparedStatements(db);
const startedAtMs = Date.UTC(2025, 0, 1, 12, 0, 0);
const startedAtMs = 1_735_732_800_000;
const videoId = getOrCreateVideoRecord(db, '/tmp/session-words-by-line.mkv', {
canonicalTitle: 'Episode',
sourcePath: '/tmp/session-words-by-line.mkv',

View File

@@ -1,6 +1,7 @@
import type { DatabaseSync } from './sqlite';
import { finalizeSessionRecord } from './session';
import { nowMs } from './time';
import { toDbTimestamp } from './query-shared';
import type { LifetimeRebuildSummary, SessionState } from './types';
interface TelemetryRow {
@@ -41,8 +42,8 @@ interface LifetimeAnimeStateRow {
interface RetainedSessionRow {
sessionId: number;
videoId: number;
startedAtMs: number;
endedAtMs: number;
startedAtMs: number | string;
endedAtMs: number | string;
lastMediaMs: number | null;
totalWatchedMs: number;
activeWatchedMs: number;
@@ -65,25 +66,29 @@ function hasRetainedPriorSession(
startedAtMs: number,
currentSessionId: number,
): boolean {
return (
Number(
(
db
const row = db
.prepare(
`
SELECT COUNT(*) AS count
SELECT 1 AS found
FROM imm_sessions
WHERE video_id = ?
AND (
started_at_ms < ?
OR (started_at_ms = ? AND session_id < ?)
CAST(started_at_ms AS REAL) < CAST(? AS REAL)
OR (
CAST(started_at_ms AS REAL) = CAST(? AS REAL)
AND session_id < ?
)
)
LIMIT 1
`,
)
.get(videoId, startedAtMs, startedAtMs, currentSessionId) as ExistenceRow | null
)?.count ?? 0,
) > 0
);
.get(
videoId,
toDbTimestamp(startedAtMs),
toDbTimestamp(startedAtMs),
currentSessionId,
) as { found: number } | null;
return Boolean(row);
}
function isFirstSessionForLocalDay(
@@ -91,23 +96,37 @@ function isFirstSessionForLocalDay(
currentSessionId: number,
startedAtMs: number,
): boolean {
return (
(
db
const row = db
.prepare(
`
SELECT COUNT(*) AS count
SELECT 1 AS found
FROM imm_sessions
WHERE date(started_at_ms / 1000, 'unixepoch', 'localtime') = date(? / 1000, 'unixepoch', 'localtime')
AND (
started_at_ms < ?
OR (started_at_ms = ? AND session_id < ?)
WHERE session_id != ?
AND CAST(
julianday(CAST(started_at_ms AS REAL) / 1000, 'unixepoch', 'localtime') - 2440587.5
AS INTEGER
) = CAST(
julianday(CAST(? AS REAL) / 1000, 'unixepoch', 'localtime') - 2440587.5
AS INTEGER
)
AND (
CAST(started_at_ms AS REAL) < CAST(? AS REAL)
OR (
CAST(started_at_ms AS REAL) = CAST(? AS REAL)
AND session_id < ?
)
)
LIMIT 1
`,
)
.get(startedAtMs, startedAtMs, startedAtMs, currentSessionId) as ExistenceRow | null
)?.count === 0
);
.get(
currentSessionId,
toDbTimestamp(startedAtMs),
toDbTimestamp(startedAtMs),
toDbTimestamp(startedAtMs),
currentSessionId,
) as { found: number } | null;
return !row;
}
function resetLifetimeSummaries(db: DatabaseSync, nowMs: number): void {
@@ -131,14 +150,14 @@ function resetLifetimeSummaries(db: DatabaseSync, nowMs: number): void {
LAST_UPDATE_DATE = ?
WHERE global_id = 1
`,
).run(nowMs, nowMs);
).run(toDbTimestamp(nowMs), toDbTimestamp(nowMs));
}
function rebuildLifetimeSummariesInternal(
db: DatabaseSync,
rebuiltAtMs: number,
): LifetimeRebuildSummary {
const sessions = db
const rows = db
.prepare(
`
SELECT
@@ -146,6 +165,7 @@ function rebuildLifetimeSummariesInternal(
video_id AS videoId,
started_at_ms AS startedAtMs,
ended_at_ms AS endedAtMs,
ended_media_ms AS lastMediaMs,
total_watched_ms AS totalWatchedMs,
active_watched_ms AS activeWatchedMs,
lines_seen AS linesSeen,
@@ -164,7 +184,19 @@ function rebuildLifetimeSummariesInternal(
ORDER BY started_at_ms ASC, session_id ASC
`,
)
.all() as RetainedSessionRow[];
.all() as Array<
Omit<RetainedSessionRow, 'startedAtMs' | 'endedAtMs' | 'lastMediaMs'> & {
startedAtMs: number | string;
endedAtMs: number | string;
lastMediaMs: number | string | null;
}
>;
const sessions = rows.map((row) => ({
...row,
startedAtMs: row.startedAtMs,
endedAtMs: row.endedAtMs,
lastMediaMs: row.lastMediaMs === null ? null : Number(row.lastMediaMs),
})) as RetainedSessionRow[];
resetLifetimeSummaries(db, rebuiltAtMs);
for (const session of sessions) {
@@ -181,9 +213,9 @@ function toRebuildSessionState(row: RetainedSessionRow): SessionState {
return {
sessionId: row.sessionId,
videoId: row.videoId,
startedAtMs: row.startedAtMs,
startedAtMs: row.startedAtMs as number,
currentLineIndex: 0,
lastWallClockMs: row.endedAtMs,
lastWallClockMs: row.endedAtMs as number,
lastMediaMs: row.lastMediaMs,
lastPauseStartMs: null,
isPaused: false,
@@ -206,7 +238,7 @@ function toRebuildSessionState(row: RetainedSessionRow): SessionState {
}
function getRetainedStaleActiveSessions(db: DatabaseSync): RetainedSessionRow[] {
return db
const rows = db
.prepare(
`
SELECT
@@ -241,20 +273,32 @@ function getRetainedStaleActiveSessions(db: DatabaseSync): RetainedSessionRow[]
ORDER BY s.started_at_ms ASC, s.session_id ASC
`,
)
.all() as RetainedSessionRow[];
.all() as Array<
Omit<RetainedSessionRow, 'startedAtMs' | 'endedAtMs' | 'lastMediaMs'> & {
startedAtMs: number | string;
endedAtMs: number | string;
lastMediaMs: number | string | null;
}
>;
return rows.map((row) => ({
...row,
startedAtMs: row.startedAtMs,
endedAtMs: row.endedAtMs,
lastMediaMs: row.lastMediaMs === null ? null : Number(row.lastMediaMs),
})) as RetainedSessionRow[];
}
function upsertLifetimeMedia(
db: DatabaseSync,
videoId: number,
nowMs: number,
nowMs: number | string,
activeMs: number,
cardsMined: number,
linesSeen: number,
tokensSeen: number,
completed: number,
startedAtMs: number,
endedAtMs: number,
startedAtMs: number | string,
endedAtMs: number | string,
): void {
db.prepare(
`
@@ -310,15 +354,15 @@ function upsertLifetimeMedia(
function upsertLifetimeAnime(
db: DatabaseSync,
animeId: number,
nowMs: number,
nowMs: number | string,
activeMs: number,
cardsMined: number,
linesSeen: number,
tokensSeen: number,
episodesStartedDelta: number,
episodesCompletedDelta: number,
startedAtMs: number,
endedAtMs: number,
startedAtMs: number | string,
endedAtMs: number | string,
): void {
db.prepare(
`
@@ -377,8 +421,9 @@ function upsertLifetimeAnime(
export function applySessionLifetimeSummary(
db: DatabaseSync,
session: SessionState,
endedAtMs: number,
endedAtMs: number | string,
): void {
const updatedAtMs = toDbTimestamp(nowMs());
const applyResult = db
.prepare(
`
@@ -393,7 +438,7 @@ export function applySessionLifetimeSummary(
ON CONFLICT(session_id) DO NOTHING
`,
)
.run(session.sessionId, endedAtMs, nowMs(), nowMs());
.run(session.sessionId, endedAtMs, updatedAtMs, updatedAtMs);
if ((applyResult.changes ?? 0) <= 0) {
return;
@@ -468,7 +513,6 @@ export function applySessionLifetimeSummary(
? 1
: 0;
const updatedAtMs = nowMs();
db.prepare(
`
UPDATE imm_lifetime_global

View File

@@ -11,6 +11,7 @@ import {
toMonthKey,
} from './maintenance';
import { ensureSchema } from './storage';
import { toDbTimestamp } from './query-shared';
function makeDbPath(): string {
const dir = fs.mkdtempSync(path.join(os.tmpdir(), 'subminer-maintenance-test-'));
@@ -39,18 +40,18 @@ test('pruneRawRetention uses session retention separately from telemetry retenti
INSERT INTO imm_videos (
video_id, video_key, canonical_title, source_type, duration_ms, CREATED_DATE, LAST_UPDATE_DATE
) VALUES (
1, 'local:/tmp/video.mkv', 'Video', 1, 0, ${nowMs}, ${nowMs}
1, 'local:/tmp/video.mkv', 'Video', 1, 0, '${toDbTimestamp(nowMs)}', '${toDbTimestamp(nowMs)}'
);
INSERT INTO imm_sessions (
session_id, session_uuid, video_id, started_at_ms, ended_at_ms, status, CREATED_DATE, LAST_UPDATE_DATE
) VALUES
(1, 'session-1', 1, ${staleEndedAtMs - 1_000}, ${staleEndedAtMs}, 2, ${staleEndedAtMs}, ${staleEndedAtMs}),
(2, 'session-2', 1, ${keptEndedAtMs - 1_000}, ${keptEndedAtMs}, 2, ${keptEndedAtMs}, ${keptEndedAtMs});
(1, 'session-1', 1, '${toDbTimestamp(staleEndedAtMs - 1_000)}', '${toDbTimestamp(staleEndedAtMs)}', 2, '${toDbTimestamp(staleEndedAtMs)}', '${toDbTimestamp(staleEndedAtMs)}'),
(2, 'session-2', 1, '${toDbTimestamp(keptEndedAtMs - 1_000)}', '${toDbTimestamp(keptEndedAtMs)}', 2, '${toDbTimestamp(keptEndedAtMs)}', '${toDbTimestamp(keptEndedAtMs)}');
INSERT INTO imm_session_telemetry (
session_id, sample_ms, total_watched_ms, active_watched_ms, CREATED_DATE, LAST_UPDATE_DATE
) VALUES
(1, ${nowMs - 200_000_000}, 0, 0, ${nowMs}, ${nowMs}),
(2, ${nowMs - 10_000_000}, 0, 0, ${nowMs}, ${nowMs});
(1, '${toDbTimestamp(nowMs - 200_000_000)}', 0, 0, '${toDbTimestamp(nowMs)}', '${toDbTimestamp(nowMs)}'),
(2, '${toDbTimestamp(nowMs - 10_000_000)}', 0, 0, '${toDbTimestamp(nowMs)}', '${toDbTimestamp(nowMs)}');
`);
const result = pruneRawRetention(db, nowMs, {
@@ -94,22 +95,22 @@ test('pruneRawRetention skips disabled retention windows', () => {
INSERT INTO imm_videos (
video_id, video_key, canonical_title, source_type, duration_ms, CREATED_DATE, LAST_UPDATE_DATE
) VALUES (
1, 'local:/tmp/video.mkv', 'Video', 1, 0, ${nowMs}, ${nowMs}
1, 'local:/tmp/video.mkv', 'Video', 1, 0, '${nowMs}', '${nowMs}'
);
INSERT INTO imm_sessions (
session_id, session_uuid, video_id, started_at_ms, ended_at_ms, status, CREATED_DATE, LAST_UPDATE_DATE
) VALUES (
1, 'session-1', 1, ${nowMs - 1_000}, ${nowMs - 500}, 2, ${nowMs}, ${nowMs}
1, 'session-1', 1, '${nowMs - 1_000}', '${nowMs - 500}', 2, '${nowMs}', '${nowMs}'
);
INSERT INTO imm_session_telemetry (
session_id, sample_ms, total_watched_ms, active_watched_ms, CREATED_DATE, LAST_UPDATE_DATE
) VALUES (
1, ${nowMs - 2_000}, 0, 0, ${nowMs}, ${nowMs}
1, '${nowMs - 2_000}', 0, 0, '${nowMs}', '${nowMs}'
);
INSERT INTO imm_session_events (
session_id, event_type, ts_ms, payload_json, CREATED_DATE, LAST_UPDATE_DATE
) VALUES (
1, 1, ${nowMs - 3_000}, '{}', ${nowMs}, ${nowMs}
1, 1, '${nowMs - 3_000}', '{}', '${nowMs}', '${nowMs}'
);
`);
@@ -161,17 +162,17 @@ test('raw retention keeps rollups and rollup retention prunes them separately',
INSERT INTO imm_videos (
video_id, video_key, canonical_title, source_type, duration_ms, CREATED_DATE, LAST_UPDATE_DATE
) VALUES (
1, 'local:/tmp/video.mkv', 'Video', 1, 0, ${nowMs}, ${nowMs}
1, 'local:/tmp/video.mkv', 'Video', 1, 0, '${nowMs}', '${nowMs}'
);
INSERT INTO imm_sessions (
session_id, session_uuid, video_id, started_at_ms, ended_at_ms, status, CREATED_DATE, LAST_UPDATE_DATE
) VALUES (
1, 'session-1', 1, ${nowMs - 200_000_000}, ${nowMs - 199_999_000}, 2, ${nowMs}, ${nowMs}
1, 'session-1', 1, '${nowMs - 200_000_000}', '${nowMs - 199_999_000}', 2, '${nowMs}', '${nowMs}'
);
INSERT INTO imm_session_telemetry (
session_id, sample_ms, total_watched_ms, active_watched_ms, CREATED_DATE, LAST_UPDATE_DATE
) VALUES (
1, ${nowMs - 200_000_000}, 0, 0, ${nowMs}, ${nowMs}
1, '${nowMs - 200_000_000}', 0, 0, '${nowMs}', '${nowMs}'
);
INSERT INTO imm_daily_rollups (
rollup_day, video_id, total_sessions, total_active_min, total_lines_seen,
@@ -183,7 +184,7 @@ test('raw retention keeps rollups and rollup retention prunes them separately',
rollup_month, video_id, total_sessions, total_active_min, total_lines_seen,
total_tokens_seen, total_cards, CREATED_DATE, LAST_UPDATE_DATE
) VALUES (
${oldMonth}, 1, 1, 10, 1, 1, 1, ${nowMs}, ${nowMs}
${oldMonth}, 1, 1, 10, 1, 1, 1, '${nowMs}', '${nowMs}'
);
`);

View File

@@ -1,13 +1,13 @@
import type { DatabaseSync } from './sqlite';
import { nowMs } from './time';
import { toDbMs } from './query-shared';
import { subtractDbTimestamp, toDbTimestamp } from './query-shared';
const ROLLUP_STATE_KEY = 'last_rollup_sample_ms';
const DAILY_MS = 86_400_000;
const ZERO_ID = 0;
interface RollupStateRow {
state_value: number;
state_value: string;
}
interface RollupGroupRow {
@@ -51,12 +51,25 @@ export function pruneRawRetention(
eventsRetentionMs: number;
telemetryRetentionMs: number;
sessionsRetentionMs: number;
eventsRetentionDays?: number;
telemetryRetentionDays?: number;
sessionsRetentionDays?: number;
},
): RawRetentionResult {
const resolveCutoff = (
retentionMs: number,
retentionDays: number | undefined,
): string => {
if (retentionDays !== undefined) {
return subtractDbTimestamp(currentMs, BigInt(retentionDays) * 86_400_000n);
}
return subtractDbTimestamp(currentMs, retentionMs);
};
const deletedSessionEvents = Number.isFinite(policy.eventsRetentionMs)
? (
db.prepare(`DELETE FROM imm_session_events WHERE ts_ms < ?`).run(
toDbMs(currentMs - policy.eventsRetentionMs),
resolveCutoff(policy.eventsRetentionMs, policy.eventsRetentionDays),
) as { changes: number }
).changes
: 0;
@@ -64,14 +77,18 @@ export function pruneRawRetention(
? (
db
.prepare(`DELETE FROM imm_session_telemetry WHERE sample_ms < ?`)
.run(toDbMs(currentMs - policy.telemetryRetentionMs)) as { changes: number }
.run(resolveCutoff(policy.telemetryRetentionMs, policy.telemetryRetentionDays)) as {
changes: number;
}
).changes
: 0;
const deletedEndedSessions = Number.isFinite(policy.sessionsRetentionMs)
? (
db
.prepare(`DELETE FROM imm_sessions WHERE ended_at_ms IS NOT NULL AND ended_at_ms < ?`)
.run(toDbMs(currentMs - policy.sessionsRetentionMs)) as { changes: number }
.run(resolveCutoff(policy.sessionsRetentionMs, policy.sessionsRetentionDays)) as {
changes: number;
}
).changes
: 0;
@@ -115,14 +132,14 @@ export function pruneRollupRetention(
};
}
function getLastRollupSampleMs(db: DatabaseSync): number {
function getLastRollupSampleMs(db: DatabaseSync): string {
const row = db
.prepare(`SELECT state_value FROM imm_rollup_state WHERE state_key = ? LIMIT 1`)
.get(ROLLUP_STATE_KEY) as unknown as RollupStateRow | null;
return row ? Number(row.state_value) : ZERO_ID;
return row ? row.state_value : String(ZERO_ID);
}
function setLastRollupSampleMs(db: DatabaseSync, sampleMs: number | bigint): void {
function setLastRollupSampleMs(db: DatabaseSync, sampleMs: number | bigint | string): void {
db.prepare(
`INSERT INTO imm_rollup_state (state_key, state_value)
VALUES (?, ?)
@@ -141,7 +158,7 @@ function resetRollups(db: DatabaseSync): void {
function upsertDailyRollupsForGroups(
db: DatabaseSync,
groups: Array<{ rollupDay: number; videoId: number }>,
rollupNowMs: bigint,
rollupNowMs: number | string,
): void {
if (groups.length === 0) {
return;
@@ -217,7 +234,7 @@ function upsertDailyRollupsForGroups(
function upsertMonthlyRollupsForGroups(
db: DatabaseSync,
groups: Array<{ rollupMonth: number; videoId: number }>,
rollupNowMs: bigint,
rollupNowMs: number | string,
): void {
if (groups.length === 0) {
return;
@@ -268,7 +285,7 @@ function upsertMonthlyRollupsForGroups(
function getAffectedRollupGroups(
db: DatabaseSync,
lastRollupSampleMs: number,
lastRollupSampleMs: number | string,
): Array<{ rollupDay: number; rollupMonth: number; videoId: number }> {
return (
db
@@ -321,7 +338,7 @@ export function runRollupMaintenance(db: DatabaseSync, forceRebuild = false): vo
return;
}
const rollupNowMs = toDbMs(nowMs());
const rollupNowMs = toDbTimestamp(nowMs());
const lastRollupSampleMs = getLastRollupSampleMs(db);
const maxSampleRow = db
@@ -356,7 +373,7 @@ export function runRollupMaintenance(db: DatabaseSync, forceRebuild = false): vo
try {
upsertDailyRollupsForGroups(db, dailyGroups, rollupNowMs);
upsertMonthlyRollupsForGroups(db, monthlyGroups, rollupNowMs);
setLastRollupSampleMs(db, toDbMs(maxSampleRow.maxSampleMs ?? ZERO_ID));
setLastRollupSampleMs(db, toDbTimestamp(maxSampleRow.maxSampleMs ?? ZERO_ID));
db.exec('COMMIT');
} catch (error) {
db.exec('ROLLBACK');
@@ -365,7 +382,7 @@ export function runRollupMaintenance(db: DatabaseSync, forceRebuild = false): vo
}
export function rebuildRollupsInTransaction(db: DatabaseSync): void {
const rollupNowMs = toDbMs(nowMs());
const rollupNowMs = toDbTimestamp(nowMs());
const maxSampleRow = db
.prepare('SELECT MAX(sample_ms) AS maxSampleMs FROM imm_session_telemetry')
.get() as unknown as RollupTelemetryResult | null;
@@ -377,7 +394,7 @@ export function rebuildRollupsInTransaction(db: DatabaseSync): void {
const affectedGroups = getAffectedRollupGroups(db, ZERO_ID);
if (affectedGroups.length === 0) {
setLastRollupSampleMs(db, toDbMs(maxSampleRow.maxSampleMs ?? ZERO_ID));
setLastRollupSampleMs(db, toDbTimestamp(maxSampleRow.maxSampleMs ?? ZERO_ID));
return;
}
@@ -396,7 +413,7 @@ export function rebuildRollupsInTransaction(db: DatabaseSync): void {
upsertDailyRollupsForGroups(db, dailyGroups, rollupNowMs);
upsertMonthlyRollupsForGroups(db, monthlyGroups, rollupNowMs);
setLastRollupSampleMs(db, toDbMs(maxSampleRow.maxSampleMs ?? ZERO_ID));
setLastRollupSampleMs(db, toDbTimestamp(maxSampleRow.maxSampleMs ?? ZERO_ID));
}
export function runOptimizeMaintenance(db: DatabaseSync): void {

View File

@@ -12,6 +12,7 @@ import type {
WordDetailRow,
WordOccurrenceRow,
} from './types';
import { fromDbTimestamp } from './query-shared';
export function getVocabularyStats(
db: DatabaseSync,
@@ -134,7 +135,11 @@ export function getSessionEvents(
SELECT event_type AS eventType, ts_ms AS tsMs, payload_json AS payload
FROM imm_session_events WHERE session_id = ? ORDER BY ts_ms ASC LIMIT ?
`);
return stmt.all(sessionId, limit) as SessionEventRow[];
const rows = stmt.all(sessionId, limit) as Array<SessionEventRow & { tsMs: number | string }>;
return rows.map((row) => ({
...row,
tsMs: fromDbTimestamp(row.tsMs) ?? 0,
}));
}
const placeholders = eventTypes.map(() => '?').join(', ');
@@ -145,7 +150,13 @@ export function getSessionEvents(
ORDER BY ts_ms ASC
LIMIT ?
`);
return stmt.all(sessionId, ...eventTypes, limit) as SessionEventRow[];
const rows = stmt.all(sessionId, ...eventTypes, limit) as Array<SessionEventRow & {
tsMs: number | string;
}>;
return rows.map((row) => ({
...row,
tsMs: fromDbTimestamp(row.tsMs) ?? 0,
}));
}
export function getWordDetail(db: DatabaseSync, wordId: number): WordDetailRow | null {

View File

@@ -16,10 +16,10 @@ import type {
StreakCalendarRow,
WatchTimePerAnimeRow,
} from './types';
import { ACTIVE_SESSION_METRICS_CTE, resolvedCoverBlobExpr } from './query-shared';
import { ACTIVE_SESSION_METRICS_CTE, fromDbTimestamp, resolvedCoverBlobExpr } from './query-shared';
export function getAnimeLibrary(db: DatabaseSync): AnimeLibraryRow[] {
return db
const rows = db
.prepare(
`
SELECT
@@ -40,11 +40,15 @@ export function getAnimeLibrary(db: DatabaseSync): AnimeLibraryRow[] {
ORDER BY totalActiveMs DESC, lm.last_watched_ms DESC, canonicalTitle ASC
`,
)
.all() as unknown as AnimeLibraryRow[];
.all() as Array<AnimeLibraryRow & { lastWatchedMs: number | string }>;
return rows.map((row) => ({
...row,
lastWatchedMs: fromDbTimestamp(row.lastWatchedMs) ?? 0,
}));
}
export function getAnimeDetail(db: DatabaseSync, animeId: number): AnimeDetailRow | null {
return db
const row = db
.prepare(
`
${ACTIVE_SESSION_METRICS_CTE}
@@ -75,7 +79,13 @@ export function getAnimeDetail(db: DatabaseSync, animeId: number): AnimeDetailRo
GROUP BY a.anime_id
`,
)
.get(animeId) as unknown as AnimeDetailRow | null;
.get(animeId) as (AnimeDetailRow & { lastWatchedMs: number | string }) | null;
return row
? {
...row,
lastWatchedMs: fromDbTimestamp(row.lastWatchedMs) ?? 0,
}
: null;
}
export function getAnimeAnilistEntries(db: DatabaseSync, animeId: number): AnimeAnilistEntryRow[] {
@@ -98,7 +108,7 @@ export function getAnimeAnilistEntries(db: DatabaseSync, animeId: number): Anime
}
export function getAnimeEpisodes(db: DatabaseSync, animeId: number): AnimeEpisodeRow[] {
return db
const rows = db
.prepare(
`
${ACTIVE_SESSION_METRICS_CTE}
@@ -168,11 +178,21 @@ export function getAnimeEpisodes(db: DatabaseSync, animeId: number): AnimeEpisod
v.video_id ASC
`,
)
.all(animeId) as unknown as AnimeEpisodeRow[];
.all(animeId) as Array<
AnimeEpisodeRow & {
endedMediaMs: number | string | null;
lastWatchedMs: number | string;
}
>;
return rows.map((row) => ({
...row,
endedMediaMs: fromDbTimestamp(row.endedMediaMs),
lastWatchedMs: fromDbTimestamp(row.lastWatchedMs) ?? 0,
}));
}
export function getMediaLibrary(db: DatabaseSync): MediaLibraryRow[] {
return db
const rows = db
.prepare(
`
SELECT
@@ -205,7 +225,11 @@ export function getMediaLibrary(db: DatabaseSync): MediaLibraryRow[] {
ORDER BY lm.last_watched_ms DESC
`,
)
.all() as unknown as MediaLibraryRow[];
.all() as Array<MediaLibraryRow & { lastWatchedMs: number | string }>;
return rows.map((row) => ({
...row,
lastWatchedMs: fromDbTimestamp(row.lastWatchedMs) ?? 0,
}));
}
export function getMediaDetail(db: DatabaseSync, videoId: number): MediaDetailRow | null {
@@ -253,7 +277,7 @@ export function getMediaSessions(
videoId: number,
limit = 100,
): SessionSummaryQueryRow[] {
return db
const rows = db
.prepare(
`
${ACTIVE_SESSION_METRICS_CTE}
@@ -279,7 +303,17 @@ export function getMediaSessions(
LIMIT ?
`,
)
.all(videoId, limit) as unknown as SessionSummaryQueryRow[];
.all(videoId, limit) as Array<
SessionSummaryQueryRow & {
startedAtMs: number | string;
endedAtMs: number | string | null;
}
>;
return rows.map((row) => ({
...row,
startedAtMs: fromDbTimestamp(row.startedAtMs) ?? 0,
endedAtMs: fromDbTimestamp(row.endedAtMs),
}));
}
export function getMediaDailyRollups(
@@ -351,7 +385,7 @@ export function getAnimeDailyRollups(
export function getAnimeCoverArt(db: DatabaseSync, animeId: number): MediaArtRow | null {
const resolvedCoverBlob = resolvedCoverBlobExpr('a', 'cab');
return db
const row = db
.prepare(
`
SELECT
@@ -372,12 +406,18 @@ export function getAnimeCoverArt(db: DatabaseSync, animeId: number): MediaArtRow
LIMIT 1
`,
)
.get(animeId) as unknown as MediaArtRow | null;
.get(animeId) as (MediaArtRow & { fetchedAtMs: number | string }) | null;
return row
? {
...row,
fetchedAtMs: fromDbTimestamp(row.fetchedAtMs) ?? 0,
}
: null;
}
export function getCoverArt(db: DatabaseSync, videoId: number): MediaArtRow | null {
const resolvedCoverBlob = resolvedCoverBlobExpr('a', 'cab');
return db
const row = db
.prepare(
`
SELECT
@@ -394,7 +434,13 @@ export function getCoverArt(db: DatabaseSync, videoId: number): MediaArtRow | nu
WHERE a.video_id = ?
`,
)
.get(videoId) as unknown as MediaArtRow | null;
.get(videoId) as (MediaArtRow & { fetchedAtMs: number | string }) | null;
return row
? {
...row,
fetchedAtMs: fromDbTimestamp(row.fetchedAtMs) ?? 0,
}
: null;
}
export function getStreakCalendar(db: DatabaseSync, days = 90): StreakCalendarRow[] {
@@ -510,7 +556,7 @@ export function getEpisodeWords(db: DatabaseSync, videoId: number, limit = 50):
}
export function getEpisodeSessions(db: DatabaseSync, videoId: number): SessionSummaryQueryRow[] {
return db
const rows = db
.prepare(
`
${ACTIVE_SESSION_METRICS_CTE}
@@ -533,7 +579,17 @@ export function getEpisodeSessions(db: DatabaseSync, videoId: number): SessionSu
ORDER BY s.started_at_ms DESC
`,
)
.all(videoId) as SessionSummaryQueryRow[];
.all(videoId) as Array<
SessionSummaryQueryRow & {
startedAtMs: number | string;
endedAtMs: number | string | null;
}
>;
return rows.map((row) => ({
...row,
startedAtMs: fromDbTimestamp(row.startedAtMs) ?? 0,
endedAtMs: fromDbTimestamp(row.endedAtMs),
}));
}
export function getEpisodeCardEvents(db: DatabaseSync, videoId: number): EpisodeCardEventRow[] {
@@ -552,7 +608,7 @@ export function getEpisodeCardEvents(db: DatabaseSync, videoId: number): Episode
.all(videoId) as Array<{
eventId: number;
sessionId: number;
tsMs: number;
tsMs: number | string;
cardsDelta: number;
payloadJson: string | null;
}>;
@@ -568,7 +624,7 @@ export function getEpisodeCardEvents(db: DatabaseSync, videoId: number): Episode
return {
eventId: row.eventId,
sessionId: row.sessionId,
tsMs: row.tsMs,
tsMs: fromDbTimestamp(row.tsMs) ?? 0,
cardsDelta: row.cardsDelta,
noteIds,
};

View File

@@ -17,6 +17,7 @@ import {
getAffectedWordIdsForVideo,
refreshLexicalAggregates,
toDbMs,
toDbTimestamp,
} from './query-shared';
type CleanupVocabularyRow = {
@@ -351,7 +352,7 @@ export function upsertCoverArt(
)
.get(videoId) as { coverBlobHash: string | null } | undefined;
const sharedCoverBlobHash = findSharedCoverBlobHash(db, videoId, art.anilistId, art.coverUrl);
const fetchedAtMs = toDbMs(nowMs());
const fetchedAtMs = toDbTimestamp(nowMs());
const coverBlob = normalizeCoverBlobBytes(art.coverBlob);
const computedCoverBlobHash =
coverBlob && coverBlob.length > 0
@@ -444,7 +445,7 @@ export function updateAnimeAnilistInfo(
info.titleEnglish,
info.titleNative,
info.episodesTotal,
toDbMs(nowMs()),
toDbTimestamp(nowMs()),
row.anime_id,
);
}
@@ -452,7 +453,7 @@ export function updateAnimeAnilistInfo(
export function markVideoWatched(db: DatabaseSync, videoId: number, watched: boolean): void {
db.prepare('UPDATE imm_videos SET watched = ?, LAST_UPDATE_DATE = ? WHERE video_id = ?').run(
watched ? 1 : 0,
toDbMs(nowMs()),
toDbTimestamp(nowMs()),
videoId,
);
}

View File

@@ -1,11 +1,17 @@
import type { DatabaseSync } from './sqlite';
import { nowMs } from './time';
import type {
ImmersionSessionRollupRow,
SessionSummaryQueryRow,
SessionTimelineRow,
} from './types';
import { ACTIVE_SESSION_METRICS_CTE } from './query-shared';
import {
ACTIVE_SESSION_METRICS_CTE,
currentDbTimestamp,
fromDbTimestamp,
getLocalEpochDay,
getShiftedLocalDaySec,
toDbTimestamp,
} from './query-shared';
export function getSessionSummaries(db: DatabaseSync, limit = 50): SessionSummaryQueryRow[] {
const prepared = db.prepare(`
@@ -33,7 +39,15 @@ export function getSessionSummaries(db: DatabaseSync, limit = 50): SessionSummar
ORDER BY s.started_at_ms DESC
LIMIT ?
`);
return prepared.all(limit) as unknown as SessionSummaryQueryRow[];
const rows = prepared.all(limit) as Array<SessionSummaryQueryRow & {
startedAtMs: number | string;
endedAtMs: number | string | null;
}>;
return rows.map((row) => ({
...row,
startedAtMs: fromDbTimestamp(row.startedAtMs) ?? 0,
endedAtMs: fromDbTimestamp(row.endedAtMs),
}));
}
export function getSessionTimeline(
@@ -55,11 +69,23 @@ export function getSessionTimeline(
`;
if (limit === undefined) {
return db.prepare(select).all(sessionId) as unknown as SessionTimelineRow[];
const rows = db.prepare(select).all(sessionId) as Array<SessionTimelineRow & {
sampleMs: number | string;
}>;
return rows.map((row) => ({
...row,
sampleMs: fromDbTimestamp(row.sampleMs) ?? 0,
}));
}
return db
const rows = db
.prepare(`${select}\n LIMIT ?`)
.all(sessionId, limit) as unknown as SessionTimelineRow[];
.all(sessionId, limit) as Array<SessionTimelineRow & {
sampleMs: number | string;
}>;
return rows.map((row) => ({
...row,
sampleMs: fromDbTimestamp(row.sampleMs) ?? 0,
}));
}
/** Returns all distinct headwords in the vocabulary table (global). */
@@ -129,35 +155,50 @@ export function getSessionWordsByLine(
}
function getNewWordCounts(db: DatabaseSync): { newWordsToday: number; newWordsThisWeek: number } {
const now = new Date();
const todayStartSec = new Date(now.getFullYear(), now.getMonth(), now.getDate()).getTime() / 1000;
const weekAgoSec =
new Date(now.getFullYear(), now.getMonth(), now.getDate() - 7).getTime() / 1000;
const currentTimestamp = currentDbTimestamp();
const todayStartSec = getShiftedLocalDaySec(db, currentTimestamp, 0);
const weekAgoSec = getShiftedLocalDaySec(db, currentTimestamp, -7);
const row = db
const rows = db
.prepare(
`
WITH headword_first_seen AS (
SELECT
headword,
MIN(first_seen) AS first_seen
first_seen AS firstSeen
FROM imm_words
WHERE first_seen IS NOT NULL
AND headword IS NOT NULL
AND headword != ''
GROUP BY headword
)
SELECT
COALESCE(SUM(CASE WHEN first_seen >= ? THEN 1 ELSE 0 END), 0) AS today,
COALESCE(SUM(CASE WHEN first_seen >= ? THEN 1 ELSE 0 END), 0) AS week
FROM headword_first_seen
`,
)
.get(todayStartSec, weekAgoSec) as { today: number; week: number } | null;
.all() as Array<{ headword: string; firstSeen: number | string }>;
const firstSeenByHeadword = new Map<string, number>();
for (const row of rows) {
const firstSeen = Number(row.firstSeen);
if (!Number.isFinite(firstSeen)) {
continue;
}
const previous = firstSeenByHeadword.get(row.headword);
if (previous === undefined || firstSeen < previous) {
firstSeenByHeadword.set(row.headword, firstSeen);
}
}
let today = 0;
let week = 0;
for (const firstSeen of firstSeenByHeadword.values()) {
if (firstSeen >= todayStartSec) {
today += 1;
}
if (firstSeen >= weekAgoSec) {
week += 1;
}
}
return {
newWordsToday: Number(row?.today ?? 0),
newWordsThisWeek: Number(row?.week ?? 0),
newWordsToday: today,
newWordsThisWeek: week,
};
}
@@ -203,10 +244,8 @@ export function getQueryHints(db: DatabaseSync): {
animeCompleted: number;
} | null;
const now = new Date();
const todayLocal = Math.floor(
new Date(now.getFullYear(), now.getMonth(), now.getDate()).getTime() / 86_400_000,
);
const currentTimestamp = currentDbTimestamp();
const todayLocal = getLocalEpochDay(db, currentTimestamp);
const episodesToday =
(
@@ -215,13 +254,16 @@ export function getQueryHints(db: DatabaseSync): {
`
SELECT COUNT(DISTINCT s.video_id) AS count
FROM imm_sessions s
WHERE CAST(julianday(s.started_at_ms / 1000, 'unixepoch', 'localtime') - 2440587.5 AS INTEGER) = ?
WHERE CAST(
julianday(CAST(s.started_at_ms AS REAL) / 1000, 'unixepoch', 'localtime') - 2440587.5
AS INTEGER
) = ?
`,
)
.get(todayLocal) as { count: number }
)?.count ?? 0;
const thirtyDaysAgoMs = nowMs() - 30 * 86400000;
const thirtyDaysAgoMs = getShiftedLocalDaySec(db, currentTimestamp, -30).toString() + '000';
const activeAnimeCount =
(
db

View File

@@ -1,4 +1,5 @@
import type { DatabaseSync } from './sqlite';
import { nowMs } from './time';
export const ACTIVE_SESSION_METRICS_CTE = `
WITH active_session_metrics AS (
@@ -280,3 +281,213 @@ export function toDbMs(ms: number | bigint): bigint {
}
return BigInt(Math.trunc(ms));
}
/**
 * Normalizes a free-form numeric string into the canonical integer-string
 * form used for database timestamps.
 *
 * Plain integers (optionally with a trailing ".000…" fraction) are handled
 * textually so leading zeros are stripped without any float round-trip;
 * anything else is parsed with Number() and truncated toward zero.
 * Throws a TypeError for empty or non-numeric input.
 */
function normalizeTimestampString(value: string): string {
  const candidate = value.trim();
  if (candidate === '') {
    throw new TypeError(`Invalid database timestamp: ${value}`);
  }
  const match = /^(-?)(\d+)(?:\.0+)?$/.exec(candidate);
  if (match !== null) {
    const sign = match[1] ?? '';
    // Strip leading zeros but always keep at least one digit.
    const digits = (match[2] ?? '0').replace(/^0+(?=\d)/, '');
    return sign + (digits.length > 0 ? digits : '0');
  }
  const numeric = Number(candidate);
  if (!Number.isFinite(numeric)) {
    throw new TypeError(`Invalid database timestamp: ${value}`);
  }
  return JSON.stringify(Math.trunc(numeric));
}
/**
 * Converts a millisecond timestamp (number, bigint, or numeric string) into
 * the canonical integer-string form bound to SQLite, avoiding the driver's
 * numeric truncation of large epoch-ms values.
 *
 * @throws TypeError for non-finite numbers or unparseable strings.
 */
export function toDbTimestamp(ms: number | bigint | string): string {
  switch (typeof ms) {
    case 'bigint':
      return ms.toString();
    case 'string':
      return normalizeTimestampString(ms);
    default:
      if (!Number.isFinite(ms)) {
        throw new TypeError(`Invalid database timestamp: ${ms}`);
      }
      return JSON.stringify(Math.trunc(ms));
  }
}
/**
 * Returns "now" in canonical DB timestamp form.
 *
 * Tests may override the clock by setting `globalThis.__subminerTestNowMs`
 * to a numeric string or a finite number; any other value falls through to
 * the tracker's `nowMs()` clock.
 */
export function currentDbTimestamp(): string {
  const override = globalThis.__subminerTestNowMs;
  switch (typeof override) {
    case 'string':
      return normalizeTimestampString(override);
    case 'number':
      if (Number.isFinite(override)) {
        return toDbTimestamp(override);
      }
      break;
    default:
      break;
  }
  return toDbTimestamp(nowMs());
}
/**
 * Subtracts `deltaMs` from a timestamp and returns the result as a canonical
 * DB timestamp string. BigInt arithmetic keeps the subtraction exact beyond
 * Number.MAX_SAFE_INTEGER.
 */
export function subtractDbTimestamp(
  timestampMs: number | bigint | string,
  deltaMs: number | bigint,
): string {
  const base = BigInt(toDbTimestamp(timestampMs));
  const delta = BigInt(deltaMs);
  return String(base - delta);
}
/**
 * Converts a timestamp value read back from the database into a JS number of
 * milliseconds, or null when the column was NULL/absent.
 *
 * NOTE(review): a non-numeric string coerces to NaN here; writers go through
 * toDbTimestamp, so stored values are presumed to be numeric text — confirm.
 */
export function fromDbTimestamp(ms: number | bigint | string | null | undefined): number | null {
  if (ms == null) {
    return null;
  }
  return typeof ms === 'number' ? ms : Number(ms);
}
/**
 * Runs a single-value calendar SQL query, binding the timestamp in canonical
 * DB string form, and coerces the `value` column to a number (0 when the
 * query produced no row or a NULL value).
 */
function getNumericCalendarValue(
  db: DatabaseSync,
  sql: string,
  timestampMs: number | bigint | string,
): number {
  const result = db.prepare(sql).get(toDbTimestamp(timestampMs)) as
    | { value: number | string | null }
    | undefined;
  if (result === undefined || result.value === null) {
    return 0;
  }
  return Number(result.value);
}
/**
 * Maps a ms timestamp to its local-calendar day expressed as whole days since
 * the Unix epoch. The conversion is delegated to SQLite so it agrees with the
 * database's localtime rules.
 */
export function getLocalEpochDay(
  db: DatabaseSync,
  timestampMs: number | bigint | string,
): number {
  const sql = `
    SELECT CAST(
      julianday(CAST(? AS REAL) / 1000, 'unixepoch', 'localtime') - 2440587.5
      AS INTEGER
    ) AS value
  `;
  return getNumericCalendarValue(db, sql, timestampMs);
}
/**
 * Maps a ms timestamp to a local-calendar month key in YYYYMM form
 * (e.g. 202601), computed by SQLite in localtime.
 */
export function getLocalMonthKey(
  db: DatabaseSync,
  timestampMs: number | bigint | string,
): number {
  const sql = `
    SELECT CAST(
      strftime('%Y%m', CAST(? AS REAL) / 1000, 'unixepoch', 'localtime')
      AS INTEGER
    ) AS value
  `;
  return getNumericCalendarValue(db, sql, timestampMs);
}
/**
 * Maps a ms timestamp to its local day of week (0 = Sunday … 6 = Saturday,
 * per SQLite's strftime('%w')), computed in localtime.
 */
export function getLocalDayOfWeek(
  db: DatabaseSync,
  timestampMs: number | bigint | string,
): number {
  const sql = `
    SELECT CAST(
      strftime('%w', CAST(? AS REAL) / 1000, 'unixepoch', 'localtime')
      AS INTEGER
    ) AS value
  `;
  return getNumericCalendarValue(db, sql, timestampMs);
}
/**
 * Maps a ms timestamp to its local hour of day (0-23), computed by SQLite in
 * localtime so it matches the other calendar helpers.
 */
export function getLocalHourOfDay(
  db: DatabaseSync,
  timestampMs: number | bigint | string,
): number {
  const sql = `
    SELECT CAST(
      strftime('%H', CAST(? AS REAL) / 1000, 'unixepoch', 'localtime')
      AS INTEGER
    ) AS value
  `;
  return getNumericCalendarValue(db, sql, timestampMs);
}
/**
 * Returns the start of the local calendar day containing `timestampMs`, in
 * whole seconds since the Unix epoch. SQLite converts to localtime, snaps to
 * 'start of day', then converts back to UTC seconds.
 */
export function getStartOfLocalDaySec(
  db: DatabaseSync,
  timestampMs: number | bigint | string,
): number {
  const sql = `
    SELECT CAST(
      strftime(
        '%s',
        CAST(? AS REAL) / 1000,
        'unixepoch',
        'localtime',
        'start of day',
        'utc'
      ) AS INTEGER
    ) AS value
  `;
  return getNumericCalendarValue(db, sql, timestampMs);
}
/**
 * Start of the local calendar day containing `timestampMs`, as a canonical
 * ms-since-epoch string (the seconds value with "000" appended).
 */
export function getStartOfLocalDayTimestamp(
  db: DatabaseSync,
  timestampMs: number | bigint | string,
): string {
  const startSec = getStartOfLocalDaySec(db, timestampMs);
  return `${startSec}000`;
}
/**
 * Returns the start of the local calendar day `dayOffset` days away from the
 * day containing `timestampMs`, as a canonical ms-since-epoch string.
 *
 * The day arithmetic is delegated to SQLite ('localtime', 'start of day',
 * '±N days', 'utc') so it agrees with the database's DST/localtime rules.
 * The shift modifier is bound as a parameter instead of being interpolated
 * into the SQL text, and non-finite offsets are rejected up front —
 * previously a NaN offset built an invalid "NaN days" modifier and silently
 * yielded the "0000" fallback.
 *
 * @throws TypeError when `dayOffset` is not a finite number.
 */
export function getShiftedLocalDayTimestamp(
  db: DatabaseSync,
  timestampMs: number | bigint | string,
  dayOffset: number,
): string {
  if (!Number.isFinite(dayOffset)) {
    throw new TypeError(`Invalid day offset: ${dayOffset}`);
  }
  const normalizedDayOffset = Math.trunc(dayOffset);
  // SQLite date modifiers require an explicit sign for positive shifts.
  const modifier =
    normalizedDayOffset >= 0 ? `+${normalizedDayOffset} days` : `${normalizedDayOffset} days`;
  const row = db
    .prepare(
      `
      SELECT strftime(
        '%s',
        CAST(? AS REAL) / 1000,
        'unixepoch',
        'localtime',
        'start of day',
        ?,
        'utc'
      ) AS value
    `,
    )
    .get(toDbTimestamp(timestampMs), modifier) as { value: string | number | null } | undefined;
  // Seconds → ms by appending "000"; '0' fallback preserves prior behavior
  // when strftime produces no value.
  return `${row?.value ?? '0'}000`;
}
/**
 * Like getShiftedLocalDayTimestamp, but returns whole seconds since the
 * epoch as a number. BigInt division keeps the ms→s truncation exact.
 */
export function getShiftedLocalDaySec(
  db: DatabaseSync,
  timestampMs: number | bigint | string,
  dayOffset: number,
): number {
  const shiftedMs = getShiftedLocalDayTimestamp(db, timestampMs, dayOffset);
  return Number(BigInt(shiftedMs) / 1000n);
}
/**
 * Start of the local calendar day containing `timestampMs`, in milliseconds
 * since the epoch as a number.
 */
export function getStartOfLocalDayMs(
  db: DatabaseSync,
  timestampMs: number | bigint | string,
): number {
  const startSec = getStartOfLocalDaySec(db, timestampMs);
  return startSec * 1000;
}

View File

@@ -1,6 +1,16 @@
import type { DatabaseSync } from './sqlite';
import type { ImmersionSessionRollupRow } from './types';
import { ACTIVE_SESSION_METRICS_CTE, makePlaceholders } from './query-shared';
import {
ACTIVE_SESSION_METRICS_CTE,
currentDbTimestamp,
getLocalDayOfWeek,
getLocalEpochDay,
getLocalHourOfDay,
getLocalMonthKey,
getShiftedLocalDayTimestamp,
makePlaceholders,
toDbTimestamp,
} from './query-shared';
import { getDailyRollups, getMonthlyRollups } from './query-sessions';
type TrendRange = '7d' | '30d' | '90d' | 'all';
@@ -19,6 +29,10 @@ interface TrendPerAnimePoint {
interface TrendSessionMetricRow {
startedAtMs: number;
epochDay: number;
monthKey: number;
dayOfWeek: number;
hourOfDay: number;
videoId: number | null;
canonicalTitle: string | null;
animeTitle: string | null;
@@ -73,64 +87,64 @@ const TREND_DAY_LIMITS: Record<Exclude<TrendRange, 'all'>, number> = {
'90d': 90,
};
const MONTH_NAMES = ['Jan', 'Feb', 'Mar', 'Apr', 'May', 'Jun', 'Jul', 'Aug', 'Sep', 'Oct', 'Nov', 'Dec'];
const DAY_NAMES = ['Sun', 'Mon', 'Tue', 'Wed', 'Thu', 'Fri', 'Sat'];
function getTrendDayLimit(range: TrendRange): number {
return range === 'all' ? 365 : TREND_DAY_LIMITS[range];
}
function getTrendMonthlyLimit(range: TrendRange): number {
function getTrendMonthlyLimit(db: DatabaseSync, range: TrendRange): number {
if (range === 'all') {
return 120;
}
const now = new Date();
const cutoff = new Date(
now.getFullYear(),
now.getMonth(),
now.getDate() - (TREND_DAY_LIMITS[range] - 1),
);
return Math.max(1, (now.getFullYear() - cutoff.getFullYear()) * 12 + now.getMonth() - cutoff.getMonth() + 1);
const currentTimestamp = currentDbTimestamp();
const todayStartMs = getShiftedLocalDayTimestamp(db, currentTimestamp, 0);
const cutoffMs = getShiftedLocalDayTimestamp(db, currentTimestamp, -(TREND_DAY_LIMITS[range] - 1));
const currentMonthKey = getLocalMonthKey(db, todayStartMs);
const cutoffMonthKey = getLocalMonthKey(db, cutoffMs);
const currentYear = Math.floor(currentMonthKey / 100);
const currentMonth = currentMonthKey % 100;
const cutoffYear = Math.floor(cutoffMonthKey / 100);
const cutoffMonth = cutoffMonthKey % 100;
return Math.max(1, (currentYear - cutoffYear) * 12 + currentMonth - cutoffMonth + 1);
}
function getTrendCutoffMs(range: TrendRange): number | null {
function getTrendCutoffMs(db: DatabaseSync, range: TrendRange): string | null {
if (range === 'all') {
return null;
}
const dayLimit = getTrendDayLimit(range);
const now = new Date();
const localMidnight = new Date(now.getFullYear(), now.getMonth(), now.getDate()).getTime();
return localMidnight - (dayLimit - 1) * 86_400_000;
return getShiftedLocalDayTimestamp(db, currentDbTimestamp(), -(getTrendDayLimit(range) - 1));
}
/**
 * Converts days-since-Unix-epoch into a proleptic Gregorian civil date using
 * Howard Hinnant's civil_from_days algorithm. Month and day are 1-based.
 */
function dayPartsFromEpochDay(epochDay: number): { year: number; month: number; day: number } {
  // Shift the epoch so day 0 is 0000-03-01, the start of a 400-year era;
  // placing leap day at the end of the "year" simplifies the arithmetic.
  const daysFromCivilZero = epochDay + 719468;
  const era = Math.floor(daysFromCivilZero / 146097);
  const dayOfEra = daysFromCivilZero - era * 146097;
  const yearOfEra = Math.floor(
    (dayOfEra -
      Math.floor(dayOfEra / 1460) +
      Math.floor(dayOfEra / 36524) -
      Math.floor(dayOfEra / 146096)) /
      365,
  );
  const dayOfYear =
    dayOfEra - (365 * yearOfEra + Math.floor(yearOfEra / 4) - Math.floor(yearOfEra / 100));
  const monthIndex = Math.floor((5 * dayOfYear + 2) / 153); // 0 = March
  const day = dayOfYear - Math.floor((153 * monthIndex + 2) / 5) + 1;
  const month = monthIndex < 10 ? monthIndex + 3 : monthIndex - 9;
  // Jan/Feb belong to the following civil year in the March-based reckoning.
  const year = yearOfEra + era * 400 + (month <= 2 ? 1 : 0);
  return { year, month, day };
}
function makeTrendLabel(value: number): string {
if (value > 100_000) {
const year = Math.floor(value / 100);
const month = value % 100;
return new Date(Date.UTC(year, month - 1, 1)).toLocaleDateString(undefined, {
month: 'short',
year: '2-digit',
});
return `${MONTH_NAMES[month - 1]} ${String(year).slice(-2)}`;
}
return new Date(value * 86_400_000).toLocaleDateString(undefined, {
month: 'short',
day: 'numeric',
});
}
function getLocalEpochDay(timestampMs: number): number {
const date = new Date(timestampMs);
return Math.floor((timestampMs - date.getTimezoneOffset() * 60_000) / 86_400_000);
}
function getLocalDateForEpochDay(epochDay: number): Date {
const utcDate = new Date(epochDay * 86_400_000);
return new Date(utcDate.getTime() + utcDate.getTimezoneOffset() * 60_000);
}
function getLocalMonthKey(timestampMs: number): number {
const date = new Date(timestampMs);
return date.getFullYear() * 100 + date.getMonth() + 1;
const { month, day } = dayPartsFromEpochDay(value);
return `${MONTH_NAMES[month - 1]} ${day}`;
}
function getTrendSessionWordCount(session: Pick<TrendSessionMetricRow, 'tokensSeen'>): number {
@@ -189,7 +203,7 @@ function buildAggregatedTrendRows(rollups: ImmersionSessionRollupRow[]) {
function buildWatchTimeByDayOfWeek(sessions: TrendSessionMetricRow[]): TrendChartPoint[] {
const totals = new Array(7).fill(0);
for (const session of sessions) {
totals[new Date(session.startedAtMs).getDay()] += session.activeWatchedMs;
totals[session.dayOfWeek] += session.activeWatchedMs;
}
return DAY_NAMES.map((name, index) => ({
label: name,
@@ -200,7 +214,7 @@ function buildWatchTimeByDayOfWeek(sessions: TrendSessionMetricRow[]): TrendChar
function buildWatchTimeByHour(sessions: TrendSessionMetricRow[]): TrendChartPoint[] {
const totals = new Array(24).fill(0);
for (const session of sessions) {
totals[new Date(session.startedAtMs).getHours()] += session.activeWatchedMs;
totals[session.hourOfDay] += session.activeWatchedMs;
}
return totals.map((ms, index) => ({
label: `${String(index).padStart(2, '0')}:00`,
@@ -209,10 +223,8 @@ function buildWatchTimeByHour(sessions: TrendSessionMetricRow[]): TrendChartPoin
}
function dayLabel(epochDay: number): string {
return getLocalDateForEpochDay(epochDay).toLocaleDateString(undefined, {
month: 'short',
day: 'numeric',
});
const { month, day } = dayPartsFromEpochDay(epochDay);
return `${MONTH_NAMES[month - 1]} ${day}`;
}
function buildSessionSeriesByDay(
@@ -221,8 +233,7 @@ function buildSessionSeriesByDay(
): TrendChartPoint[] {
const byDay = new Map<number, number>();
for (const session of sessions) {
const epochDay = getLocalEpochDay(session.startedAtMs);
byDay.set(epochDay, (byDay.get(epochDay) ?? 0) + getValue(session));
byDay.set(session.epochDay, (byDay.get(session.epochDay) ?? 0) + getValue(session));
}
return Array.from(byDay.entries())
.sort(([left], [right]) => left - right)
@@ -235,8 +246,7 @@ function buildSessionSeriesByMonth(
): TrendChartPoint[] {
const byMonth = new Map<number, number>();
for (const session of sessions) {
const monthKey = getLocalMonthKey(session.startedAtMs);
byMonth.set(monthKey, (byMonth.get(monthKey) ?? 0) + getValue(session));
byMonth.set(session.monthKey, (byMonth.get(session.monthKey) ?? 0) + getValue(session));
}
return Array.from(byMonth.entries())
.sort(([left], [right]) => left - right)
@@ -251,8 +261,7 @@ function buildLookupsPerHundredWords(
const wordsByBucket = new Map<number, number>();
for (const session of sessions) {
const bucketKey =
groupBy === 'month' ? getLocalMonthKey(session.startedAtMs) : getLocalEpochDay(session.startedAtMs);
const bucketKey = groupBy === 'month' ? session.monthKey : session.epochDay;
lookupsByBucket.set(
bucketKey,
(lookupsByBucket.get(bucketKey) ?? 0) + session.yomitanLookupCount,
@@ -282,7 +291,7 @@ function buildPerAnimeFromSessions(
for (const session of sessions) {
const animeTitle = resolveTrendAnimeTitle(session);
const epochDay = getLocalEpochDay(session.startedAtMs);
const epochDay = session.epochDay;
const dayMap = byAnime.get(animeTitle) ?? new Map();
dayMap.set(epochDay, (dayMap.get(epochDay) ?? 0) + getValue(session));
byAnime.set(animeTitle, dayMap);
@@ -303,7 +312,7 @@ function buildLookupsPerHundredPerAnime(sessions: TrendSessionMetricRow[]): Tren
for (const session of sessions) {
const animeTitle = resolveTrendAnimeTitle(session);
const epochDay = getLocalEpochDay(session.startedAtMs);
const epochDay = session.epochDay;
const lookupMap = lookups.get(animeTitle) ?? new Map();
lookupMap.set(epochDay, (lookupMap.get(epochDay) ?? 0) + session.yomitanLookupCount);
@@ -498,9 +507,10 @@ function buildEpisodesPerMonthFromRollups(rollups: ImmersionSessionRollupRow[]):
function getTrendSessionMetrics(
db: DatabaseSync,
cutoffMs: number | null,
cutoffMs: string | null,
): TrendSessionMetricRow[] {
const whereClause = cutoffMs === null ? '' : 'WHERE s.started_at_ms >= ?';
const cutoffValue = cutoffMs === null ? null : toDbTimestamp(cutoffMs);
const prepared = db.prepare(`
${ACTIVE_SESSION_METRICS_CTE}
SELECT
@@ -520,14 +530,27 @@ function getTrendSessionMetrics(
ORDER BY s.started_at_ms ASC
`);
return (cutoffMs === null ? prepared.all() : prepared.all(cutoffMs)) as TrendSessionMetricRow[];
const rows = (cutoffValue === null ? prepared.all() : prepared.all(cutoffValue)) as Array<
TrendSessionMetricRow & { startedAtMs: number | string }
>;
return rows.map((row) => ({
...row,
startedAtMs: 0,
epochDay: getLocalEpochDay(db, row.startedAtMs),
monthKey: getLocalMonthKey(db, row.startedAtMs),
dayOfWeek: getLocalDayOfWeek(db, row.startedAtMs),
hourOfDay: getLocalHourOfDay(db, row.startedAtMs),
}));
}
function buildNewWordsPerDay(db: DatabaseSync, cutoffMs: number | null): TrendChartPoint[] {
function buildNewWordsPerDay(db: DatabaseSync, cutoffMs: string | null): TrendChartPoint[] {
const whereClause = cutoffMs === null ? '' : 'AND first_seen >= ?';
const prepared = db.prepare(`
SELECT
CAST(julianday(first_seen, 'unixepoch', 'localtime') - 2440587.5 AS INTEGER) AS epochDay,
CAST(
julianday(CAST(first_seen AS REAL), 'unixepoch', 'localtime') - 2440587.5
AS INTEGER
) AS epochDay,
COUNT(*) AS wordCount
FROM imm_words
WHERE first_seen IS NOT NULL
@@ -537,7 +560,7 @@ function buildNewWordsPerDay(db: DatabaseSync, cutoffMs: number | null): TrendCh
`);
const rows = (
cutoffMs === null ? prepared.all() : prepared.all(Math.floor(cutoffMs / 1000))
cutoffMs === null ? prepared.all() : prepared.all((BigInt(cutoffMs) / 1000n).toString())
) as Array<{
epochDay: number;
wordCount: number;
@@ -549,11 +572,14 @@ function buildNewWordsPerDay(db: DatabaseSync, cutoffMs: number | null): TrendCh
}));
}
function buildNewWordsPerMonth(db: DatabaseSync, cutoffMs: number | null): TrendChartPoint[] {
function buildNewWordsPerMonth(db: DatabaseSync, cutoffMs: string | null): TrendChartPoint[] {
const whereClause = cutoffMs === null ? '' : 'AND first_seen >= ?';
const prepared = db.prepare(`
SELECT
CAST(strftime('%Y%m', first_seen, 'unixepoch', 'localtime') AS INTEGER) AS monthKey,
CAST(
strftime('%Y%m', CAST(first_seen AS REAL), 'unixepoch', 'localtime')
AS INTEGER
) AS monthKey,
COUNT(*) AS wordCount
FROM imm_words
WHERE first_seen IS NOT NULL
@@ -563,7 +589,7 @@ function buildNewWordsPerMonth(db: DatabaseSync, cutoffMs: number | null): Trend
`);
const rows = (
cutoffMs === null ? prepared.all() : prepared.all(Math.floor(cutoffMs / 1000))
cutoffMs === null ? prepared.all() : prepared.all((BigInt(cutoffMs) / 1000n).toString())
) as Array<{
monthKey: number;
wordCount: number;
@@ -581,8 +607,8 @@ export function getTrendsDashboard(
groupBy: TrendGroupBy = 'day',
): TrendsDashboardQueryResult {
const dayLimit = getTrendDayLimit(range);
const monthlyLimit = getTrendMonthlyLimit(range);
const cutoffMs = getTrendCutoffMs(range);
const monthlyLimit = getTrendMonthlyLimit(db, range);
const cutoffMs = getTrendCutoffMs(db, range);
const useMonthlyBuckets = groupBy === 'month';
const dailyRollups = getDailyRollups(db, dayLimit);
const monthlyRollups = getMonthlyRollups(db, monthlyLimit);

View File

@@ -4,7 +4,7 @@ import { createInitialSessionState } from './reducer';
import { nowMs } from './time';
import { SESSION_STATUS_ACTIVE, SESSION_STATUS_ENDED } from './types';
import type { SessionState } from './types';
import { toDbMs } from './query-shared';
import { toDbMs, toDbTimestamp } from './query-shared';
export function startSessionRecord(
db: DatabaseSync,
@@ -25,10 +25,10 @@ export function startSessionRecord(
.run(
sessionUuid,
videoId,
toDbMs(startedAtMs),
toDbTimestamp(startedAtMs),
SESSION_STATUS_ACTIVE,
toDbMs(startedAtMs),
toDbMs(createdAtMs),
toDbTimestamp(startedAtMs),
toDbTimestamp(createdAtMs),
);
const sessionId = Number(result.lastInsertRowid);
return {
@@ -40,7 +40,7 @@ export function startSessionRecord(
export function finalizeSessionRecord(
db: DatabaseSync,
sessionState: SessionState,
endedAtMs = nowMs(),
endedAtMs: number | string = nowMs(),
): void {
db.prepare(
`
@@ -66,7 +66,7 @@ export function finalizeSessionRecord(
WHERE session_id = ?
`,
).run(
toDbMs(endedAtMs),
toDbTimestamp(endedAtMs),
SESSION_STATUS_ENDED,
sessionState.lastMediaMs === null ? null : toDbMs(sessionState.lastMediaMs),
sessionState.totalWatchedMs,
@@ -82,7 +82,7 @@ export function finalizeSessionRecord(
sessionState.seekForwardCount,
sessionState.seekBackwardCount,
sessionState.mediaBufferEvents,
toDbMs(nowMs()),
toDbTimestamp(nowMs()),
sessionState.sessionId,
);
}

View File

@@ -143,10 +143,10 @@ test('ensureSchema creates immersion core tables', () => {
const rollupStateRow = db
.prepare('SELECT state_value FROM imm_rollup_state WHERE state_key = ?')
.get('last_rollup_sample_ms') as {
state_value: number;
state_value: string;
} | null;
assert.ok(rollupStateRow);
assert.equal(rollupStateRow?.state_value, 0);
assert.equal(Number(rollupStateRow?.state_value ?? 0), 0);
} finally {
db.close();
cleanupDbPath(dbPath);
@@ -965,12 +965,12 @@ test('start/finalize session updates ended_at and status', () => {
const row = db
.prepare('SELECT ended_at_ms, status FROM imm_sessions WHERE session_id = ?')
.get(sessionId) as {
ended_at_ms: number | null;
ended_at_ms: string | null;
status: number;
} | null;
assert.ok(row);
assert.equal(row?.ended_at_ms, endedAtMs);
assert.equal(Number(row?.ended_at_ms ?? 0), endedAtMs);
assert.equal(row?.status, SESSION_STATUS_ENDED);
} finally {
db.close();

View File

@@ -4,7 +4,7 @@ import type { DatabaseSync } from './sqlite';
import { nowMs } from './time';
import { SCHEMA_VERSION } from './types';
import type { QueuedWrite, VideoMetadata, YoutubeVideoMetadata } from './types';
import { toDbMs } from './query-shared';
import { toDbMs, toDbTimestamp } from './query-shared';
export interface TrackerPreparedStatements {
telemetryInsertStmt: ReturnType<DatabaseSync['prepare']>;
@@ -130,7 +130,7 @@ function deduplicateExistingCoverArtRows(db: DatabaseSync): void {
return;
}
const nowMsValue = toDbMs(nowMs());
const nowMsValue = toDbTimestamp(nowMs());
const upsertBlobStmt = db.prepare(`
INSERT INTO imm_cover_art_blobs (blob_hash, cover_blob, CREATED_DATE, LAST_UPDATE_DATE)
VALUES (?, ?, ?, ?)
@@ -275,7 +275,7 @@ function parseLegacyAnimeBackfillCandidate(
}
function ensureLifetimeSummaryTables(db: DatabaseSync): void {
const nowMsValue = toDbMs(nowMs());
const nowMsValue = toDbTimestamp(nowMs());
db.exec(`
CREATE TABLE IF NOT EXISTS imm_lifetime_global(
@@ -287,9 +287,9 @@ function ensureLifetimeSummaryTables(db: DatabaseSync): void {
episodes_started INTEGER NOT NULL DEFAULT 0,
episodes_completed INTEGER NOT NULL DEFAULT 0,
anime_completed INTEGER NOT NULL DEFAULT 0,
last_rebuilt_ms INTEGER,
CREATED_DATE INTEGER,
LAST_UPDATE_DATE INTEGER
last_rebuilt_ms TEXT,
CREATED_DATE TEXT,
LAST_UPDATE_DATE TEXT
)
`);
@@ -332,10 +332,10 @@ function ensureLifetimeSummaryTables(db: DatabaseSync): void {
total_tokens_seen INTEGER NOT NULL DEFAULT 0,
episodes_started INTEGER NOT NULL DEFAULT 0,
episodes_completed INTEGER NOT NULL DEFAULT 0,
first_watched_ms INTEGER,
last_watched_ms INTEGER,
CREATED_DATE INTEGER,
LAST_UPDATE_DATE INTEGER,
first_watched_ms TEXT,
last_watched_ms TEXT,
CREATED_DATE TEXT,
LAST_UPDATE_DATE TEXT,
FOREIGN KEY(anime_id) REFERENCES imm_anime(anime_id) ON DELETE CASCADE
)
`);
@@ -349,10 +349,10 @@ function ensureLifetimeSummaryTables(db: DatabaseSync): void {
total_lines_seen INTEGER NOT NULL DEFAULT 0,
total_tokens_seen INTEGER NOT NULL DEFAULT 0,
completed INTEGER NOT NULL DEFAULT 0,
first_watched_ms INTEGER,
last_watched_ms INTEGER,
CREATED_DATE INTEGER,
LAST_UPDATE_DATE INTEGER,
first_watched_ms TEXT,
last_watched_ms TEXT,
CREATED_DATE TEXT,
LAST_UPDATE_DATE TEXT,
FOREIGN KEY(video_id) REFERENCES imm_videos(video_id) ON DELETE CASCADE
)
`);
@@ -360,9 +360,9 @@ function ensureLifetimeSummaryTables(db: DatabaseSync): void {
db.exec(`
CREATE TABLE IF NOT EXISTS imm_lifetime_applied_sessions(
session_id INTEGER PRIMARY KEY,
applied_at_ms INTEGER NOT NULL,
CREATED_DATE INTEGER,
LAST_UPDATE_DATE INTEGER,
applied_at_ms TEXT NOT NULL,
CREATED_DATE TEXT,
LAST_UPDATE_DATE TEXT,
FOREIGN KEY(session_id) REFERENCES imm_sessions(session_id) ON DELETE CASCADE
)
`);
@@ -405,13 +405,13 @@ export function getOrCreateAnimeRecord(db: DatabaseSync, input: AnimeRecordInput
input.titleEnglish,
input.titleNative,
input.metadataJson,
toDbMs(nowMs()),
toDbTimestamp(nowMs()),
existing.anime_id,
);
return existing.anime_id;
}
const nowMsValue = toDbMs(nowMs());
const nowMsValue = toDbTimestamp(nowMs());
const result = db
.prepare(
`
@@ -471,7 +471,7 @@ export function linkVideoToAnimeRecord(
input.parserSource,
input.parserConfidence,
input.parseMetadataJson,
toDbMs(nowMs()),
toDbTimestamp(nowMs()),
videoId,
);
}
@@ -562,13 +562,13 @@ export function ensureSchema(db: DatabaseSync): void {
db.exec(`
CREATE TABLE IF NOT EXISTS imm_schema_version (
schema_version INTEGER PRIMARY KEY,
applied_at_ms INTEGER NOT NULL
applied_at_ms TEXT NOT NULL
);
`);
db.exec(`
CREATE TABLE IF NOT EXISTS imm_rollup_state(
state_key TEXT PRIMARY KEY,
state_value INTEGER NOT NULL
state_value TEXT NOT NULL
);
`);
db.exec(`
@@ -597,8 +597,8 @@ export function ensureSchema(db: DatabaseSync): void {
episodes_total INTEGER,
description TEXT,
metadata_json TEXT,
CREATED_DATE INTEGER,
LAST_UPDATE_DATE INTEGER
CREATED_DATE TEXT,
LAST_UPDATE_DATE TEXT
);
`);
db.exec(`
@@ -625,8 +625,8 @@ export function ensureSchema(db: DatabaseSync): void {
bitrate_kbps INTEGER, audio_codec_id INTEGER,
hash_sha256 TEXT, screenshot_path TEXT,
metadata_json TEXT,
CREATED_DATE INTEGER,
LAST_UPDATE_DATE INTEGER,
CREATED_DATE TEXT,
LAST_UPDATE_DATE TEXT,
FOREIGN KEY(anime_id) REFERENCES imm_anime(anime_id) ON DELETE SET NULL
);
`);
@@ -635,7 +635,7 @@ export function ensureSchema(db: DatabaseSync): void {
session_id INTEGER PRIMARY KEY AUTOINCREMENT,
session_uuid TEXT NOT NULL UNIQUE,
video_id INTEGER NOT NULL,
started_at_ms INTEGER NOT NULL, ended_at_ms INTEGER,
started_at_ms TEXT NOT NULL, ended_at_ms TEXT,
status INTEGER NOT NULL,
locale_id INTEGER, target_lang_id INTEGER,
difficulty_tier INTEGER, subtitle_mode INTEGER,
@@ -653,8 +653,8 @@ export function ensureSchema(db: DatabaseSync): void {
seek_forward_count INTEGER NOT NULL DEFAULT 0,
seek_backward_count INTEGER NOT NULL DEFAULT 0,
media_buffer_events INTEGER NOT NULL DEFAULT 0,
CREATED_DATE INTEGER,
LAST_UPDATE_DATE INTEGER,
CREATED_DATE TEXT,
LAST_UPDATE_DATE TEXT,
FOREIGN KEY(video_id) REFERENCES imm_videos(video_id)
);
`);
@@ -662,7 +662,7 @@ export function ensureSchema(db: DatabaseSync): void {
CREATE TABLE IF NOT EXISTS imm_session_telemetry(
telemetry_id INTEGER PRIMARY KEY AUTOINCREMENT,
session_id INTEGER NOT NULL,
sample_ms INTEGER NOT NULL,
sample_ms TEXT NOT NULL,
total_watched_ms INTEGER NOT NULL DEFAULT 0,
active_watched_ms INTEGER NOT NULL DEFAULT 0,
lines_seen INTEGER NOT NULL DEFAULT 0,
@@ -676,8 +676,8 @@ export function ensureSchema(db: DatabaseSync): void {
seek_forward_count INTEGER NOT NULL DEFAULT 0,
seek_backward_count INTEGER NOT NULL DEFAULT 0,
media_buffer_events INTEGER NOT NULL DEFAULT 0,
CREATED_DATE INTEGER,
LAST_UPDATE_DATE INTEGER,
CREATED_DATE TEXT,
LAST_UPDATE_DATE TEXT,
FOREIGN KEY(session_id) REFERENCES imm_sessions(session_id) ON DELETE CASCADE
);
`);
@@ -693,8 +693,8 @@ export function ensureSchema(db: DatabaseSync): void {
tokens_delta INTEGER NOT NULL DEFAULT 0,
cards_delta INTEGER NOT NULL DEFAULT 0,
payload_json TEXT,
CREATED_DATE INTEGER,
LAST_UPDATE_DATE INTEGER,
CREATED_DATE TEXT,
LAST_UPDATE_DATE TEXT,
FOREIGN KEY(session_id) REFERENCES imm_sessions(session_id) ON DELETE CASCADE
);
`);
@@ -710,8 +710,8 @@ export function ensureSchema(db: DatabaseSync): void {
cards_per_hour REAL,
tokens_per_min REAL,
lookup_hit_rate REAL,
CREATED_DATE INTEGER,
LAST_UPDATE_DATE INTEGER,
CREATED_DATE TEXT,
LAST_UPDATE_DATE TEXT,
PRIMARY KEY (rollup_day, video_id)
);
`);
@@ -724,8 +724,8 @@ export function ensureSchema(db: DatabaseSync): void {
total_lines_seen INTEGER NOT NULL DEFAULT 0,
total_tokens_seen INTEGER NOT NULL DEFAULT 0,
total_cards INTEGER NOT NULL DEFAULT 0,
CREATED_DATE INTEGER,
LAST_UPDATE_DATE INTEGER,
CREATED_DATE TEXT,
LAST_UPDATE_DATE TEXT,
PRIMARY KEY (rollup_month, video_id)
);
`);
@@ -806,9 +806,9 @@ export function ensureSchema(db: DatabaseSync): void {
title_romaji TEXT,
title_english TEXT,
episodes_total INTEGER,
fetched_at_ms INTEGER NOT NULL,
CREATED_DATE INTEGER,
LAST_UPDATE_DATE INTEGER,
fetched_at_ms TEXT NOT NULL,
CREATED_DATE TEXT,
LAST_UPDATE_DATE TEXT,
FOREIGN KEY(video_id) REFERENCES imm_videos(video_id) ON DELETE CASCADE
);
`);
@@ -827,9 +827,9 @@ export function ensureSchema(db: DatabaseSync): void {
uploader_url TEXT,
description TEXT,
metadata_json TEXT,
fetched_at_ms INTEGER NOT NULL,
CREATED_DATE INTEGER,
LAST_UPDATE_DATE INTEGER,
fetched_at_ms TEXT NOT NULL,
CREATED_DATE TEXT,
LAST_UPDATE_DATE TEXT,
FOREIGN KEY(video_id) REFERENCES imm_videos(video_id) ON DELETE CASCADE
);
`);
@@ -837,26 +837,26 @@ export function ensureSchema(db: DatabaseSync): void {
CREATE TABLE IF NOT EXISTS imm_cover_art_blobs(
blob_hash TEXT PRIMARY KEY,
cover_blob BLOB NOT NULL,
CREATED_DATE INTEGER,
LAST_UPDATE_DATE INTEGER
CREATED_DATE TEXT,
LAST_UPDATE_DATE TEXT
);
`);
if (currentVersion?.schema_version === 1) {
addColumnIfMissing(db, 'imm_videos', 'CREATED_DATE');
addColumnIfMissing(db, 'imm_videos', 'LAST_UPDATE_DATE');
addColumnIfMissing(db, 'imm_sessions', 'CREATED_DATE');
addColumnIfMissing(db, 'imm_sessions', 'LAST_UPDATE_DATE');
addColumnIfMissing(db, 'imm_session_telemetry', 'CREATED_DATE');
addColumnIfMissing(db, 'imm_session_telemetry', 'LAST_UPDATE_DATE');
addColumnIfMissing(db, 'imm_session_events', 'CREATED_DATE');
addColumnIfMissing(db, 'imm_session_events', 'LAST_UPDATE_DATE');
addColumnIfMissing(db, 'imm_daily_rollups', 'CREATED_DATE');
addColumnIfMissing(db, 'imm_daily_rollups', 'LAST_UPDATE_DATE');
addColumnIfMissing(db, 'imm_monthly_rollups', 'CREATED_DATE');
addColumnIfMissing(db, 'imm_monthly_rollups', 'LAST_UPDATE_DATE');
addColumnIfMissing(db, 'imm_videos', 'CREATED_DATE', 'TEXT');
addColumnIfMissing(db, 'imm_videos', 'LAST_UPDATE_DATE', 'TEXT');
addColumnIfMissing(db, 'imm_sessions', 'CREATED_DATE', 'TEXT');
addColumnIfMissing(db, 'imm_sessions', 'LAST_UPDATE_DATE', 'TEXT');
addColumnIfMissing(db, 'imm_session_telemetry', 'CREATED_DATE', 'TEXT');
addColumnIfMissing(db, 'imm_session_telemetry', 'LAST_UPDATE_DATE', 'TEXT');
addColumnIfMissing(db, 'imm_session_events', 'CREATED_DATE', 'TEXT');
addColumnIfMissing(db, 'imm_session_events', 'LAST_UPDATE_DATE', 'TEXT');
addColumnIfMissing(db, 'imm_daily_rollups', 'CREATED_DATE', 'TEXT');
addColumnIfMissing(db, 'imm_daily_rollups', 'LAST_UPDATE_DATE', 'TEXT');
addColumnIfMissing(db, 'imm_monthly_rollups', 'CREATED_DATE', 'TEXT');
addColumnIfMissing(db, 'imm_monthly_rollups', 'LAST_UPDATE_DATE', 'TEXT');
const migratedAtMs = toDbMs(nowMs());
const migratedAtMs = toDbTimestamp(nowMs());
db.prepare(
`
UPDATE imm_videos
@@ -1243,7 +1243,7 @@ export function ensureSchema(db: DatabaseSync): void {
db.exec(`
INSERT INTO imm_schema_version(schema_version, applied_at_ms)
VALUES (${SCHEMA_VERSION}, ${toDbMs(nowMs())})
VALUES (${SCHEMA_VERSION}, ${toDbTimestamp(nowMs())})
ON CONFLICT DO NOTHING
`);
}
@@ -1401,7 +1401,7 @@ function incrementKanjiAggregate(
}
export function executeQueuedWrite(write: QueuedWrite, stmts: TrackerPreparedStatements): void {
const currentMs = toDbMs(nowMs());
const currentMs = toDbTimestamp(nowMs());
if (write.kind === 'telemetry') {
if (
write.totalWatchedMs === undefined ||
@@ -1420,7 +1420,7 @@ export function executeQueuedWrite(write: QueuedWrite, stmts: TrackerPreparedSta
) {
throw new Error('Incomplete telemetry write');
}
const telemetrySampleMs = toDbMs(write.sampleMs ?? Number(currentMs));
const telemetrySampleMs = toDbTimestamp(write.sampleMs ?? Number(currentMs));
stmts.telemetryInsertStmt.run(
write.sessionId,
telemetrySampleMs,
@@ -1495,7 +1495,7 @@ export function executeQueuedWrite(write: QueuedWrite, stmts: TrackerPreparedSta
stmts.eventInsertStmt.run(
write.sessionId,
toDbMs(write.sampleMs ?? Number(currentMs)),
toDbTimestamp(write.sampleMs ?? Number(currentMs)),
write.eventType ?? 0,
write.lineIndex ?? null,
write.segmentStartMs ?? null,
@@ -1530,11 +1530,11 @@ export function getOrCreateVideoRecord(
LAST_UPDATE_DATE = ?
WHERE video_id = ?
`,
).run(details.canonicalTitle || 'unknown', toDbMs(nowMs()), existing.video_id);
).run(details.canonicalTitle || 'unknown', toDbTimestamp(nowMs()), existing.video_id);
return existing.video_id;
}
const currentMs = toDbMs(nowMs());
const currentMs = toDbTimestamp(nowMs());
const insert = db.prepare(`
INSERT INTO imm_videos (
video_key, canonical_title, source_type, source_path, source_url,
@@ -1604,7 +1604,7 @@ export function updateVideoMetadataRecord(
metadata.hashSha256,
metadata.screenshotPath,
metadata.metadataJson,
toDbMs(nowMs()),
toDbTimestamp(nowMs()),
videoId,
);
}
@@ -1622,7 +1622,7 @@ export function updateVideoTitleRecord(
LAST_UPDATE_DATE = ?
WHERE video_id = ?
`,
).run(canonicalTitle, toDbMs(nowMs()), videoId);
).run(canonicalTitle, toDbTimestamp(nowMs()), videoId);
}
export function upsertYoutubeVideoMetadata(
@@ -1630,7 +1630,7 @@ export function upsertYoutubeVideoMetadata(
videoId: number,
metadata: YoutubeVideoMetadata,
): void {
const currentMs = toDbMs(nowMs());
const currentMs = toDbTimestamp(nowMs());
db.prepare(
`
INSERT INTO imm_youtube_videos (

View File

@@ -1,4 +1,13 @@
declare global {
var __subminerTestNowMs: number | string | undefined;
}
export function nowMs(): number {
const testNowMs = globalThis.__subminerTestNowMs;
if (typeof testNowMs === 'number' && Number.isFinite(testNowMs)) {
return Math.floor(testNowMs);
}
const perf = globalThis.performance;
if (perf && Number.isFinite(perf.timeOrigin)) {
return Math.floor(perf.timeOrigin + perf.now());