Fix verification and immersion-tracker grouping

- isolate verifier artifacts and lease handling
- switch weekly/monthly tracker cutoffs to calendar boundaries
- tighten boot lifecycle and zip writer tests
This commit was authored on 2026-03-28 00:01:17 -07:00
parent 1408ad652a
commit 8e5cb5f885
11 changed files with 515 additions and 37 deletions

View File

@@ -18,7 +18,9 @@ Priority keys:
## Active
None.
| ID | Pri | Status | Area | Title |
| ------ | --- | ------ | -------------- | --------------------------------------------------- |
| SM-013 | P1 | doing | review-followup | Address PR #36 CodeRabbit action items |
## Ready
@@ -234,3 +236,22 @@ Done:
- replaced JSON serialize-clone call sites in runtime/config/controller update paths with `structuredClone`
- updated focused tests and fixtures to cover detached clone behavior and guard against regressions
### SM-013
Title: Address PR #36 CodeRabbit action items
Priority: P1
Status: doing
Scope:
- `plugins/subminer-workflow/skills/subminer-change-verification/scripts/verify_subminer_change.sh`
- `scripts/subminer-change-verification.test.ts`
- `src/core/services/immersion-tracker/query-sessions.ts`
- `src/core/services/immersion-tracker/query-trends.ts`
- `src/core/services/immersion-tracker/maintenance.ts`
- `src/main/boot/services.ts`
- `src/main/character-dictionary-runtime/zip.test.ts`
Acceptance:
- fix valid open CodeRabbit findings on PR #36
- add focused regression coverage for behavior changes where practical
- verify touched tests plus typecheck stay green

View File

@@ -113,15 +113,17 @@ run_step() {
local name=$2
local command=$3
local note=${4:-}
local lane_slug=${lane//[^a-zA-Z0-9_-]/-}
local slug=${name//[^a-zA-Z0-9_-]/-}
local stdout_rel="steps/${slug}.stdout.log"
local stderr_rel="steps/${slug}.stderr.log"
local step_slug="${lane_slug}--${slug}"
local stdout_rel="steps/${step_slug}.stdout.log"
local stderr_rel="steps/${step_slug}.stderr.log"
local stdout_path="$ARTIFACT_DIR/$stdout_rel"
local stderr_path="$ARTIFACT_DIR/$stderr_rel"
local status exit_code
COMMANDS_RUN+=("$command")
printf '%s\n' "$command" >"$ARTIFACT_DIR/steps/${slug}.command.txt"
printf '%s\n' "$command" >"$ARTIFACT_DIR/steps/${step_slug}.command.txt"
if [[ "$DRY_RUN" == "1" ]]; then
printf '[dry-run] %s\n' "$command" >"$stdout_path"
@@ -129,7 +131,11 @@ run_step() {
status="dry-run"
exit_code=0
else
if bash -lc "cd \"$REPO_ROOT\" && $command" >"$stdout_path" 2>"$stderr_path"; then
if HOME="$SESSION_HOME" \
XDG_CONFIG_HOME="$SESSION_XDG_CONFIG_HOME" \
SUBMINER_SESSION_LOGS_DIR="$SESSION_LOGS_DIR" \
SUBMINER_SESSION_MPV_LOG="$SESSION_MPV_LOG" \
bash -c "cd \"$REPO_ROOT\" && $command" >"$stdout_path" 2>"$stderr_path"; then
status="passed"
exit_code=0
EXECUTED_REAL_STEPS=1
@@ -157,9 +163,11 @@ record_nonpassing_step() {
local name=$2
local status=$3
local note=$4
local lane_slug=${lane//[^a-zA-Z0-9_-]/-}
local slug=${name//[^a-zA-Z0-9_-]/-}
local stdout_rel="steps/${slug}.stdout.log"
local stderr_rel="steps/${slug}.stderr.log"
local step_slug="${lane_slug}--${slug}"
local stdout_rel="steps/${step_slug}.stdout.log"
local stderr_rel="steps/${step_slug}.stderr.log"
printf '%s\n' "$note" >"$ARTIFACT_DIR/$stdout_rel"
: >"$ARTIFACT_DIR/$stderr_rel"
append_step_record "$lane" "$name" "$status" "0" "" "$stdout_rel" "$stderr_rel" "$note"
@@ -179,8 +187,10 @@ record_failed_step() {
FAILED=1
FAILURE_STEP=$2
FAILURE_COMMAND=${FAILURE_COMMAND:-"(validation)"}
FAILURE_STDOUT="steps/${2//[^a-zA-Z0-9_-]/-}.stdout.log"
FAILURE_STDERR="steps/${2//[^a-zA-Z0-9_-]/-}.stderr.log"
local lane_slug=${1//[^a-zA-Z0-9_-]/-}
local step_slug=${2//[^a-zA-Z0-9_-]/-}
FAILURE_STDOUT="steps/${lane_slug}--${step_slug}.stdout.log"
FAILURE_STDERR="steps/${lane_slug}--${step_slug}.stderr.log"
add_blocker "$3"
record_nonpassing_step "$1" "$2" "failed" "$3"
}
@@ -212,7 +222,7 @@ acquire_real_runtime_lease() {
if [[ -f "$lease_dir/session_id" ]]; then
owner=$(cat "$lease_dir/session_id")
fi
add_blocker "real-runtime lease already held${owner:+ by $owner}"
REAL_RUNTIME_LEASE_ERROR="real-runtime lease already held${owner:+ by $owner}"
return 1
}
@@ -377,8 +387,11 @@ FAILURE_COMMAND=""
FAILURE_STDOUT=""
FAILURE_STDERR=""
REAL_RUNTIME_LEASE_DIR=""
REAL_RUNTIME_LEASE_ERROR=""
PATH_SELECTION_MODE="auto"
trap 'release_real_runtime_lease' EXIT
while [[ $# -gt 0 ]]; do
case "$1" in
--lane)
@@ -486,7 +499,7 @@ for lane in "${SELECTED_LANES[@]}"; do
continue
fi
if ! acquire_real_runtime_lease; then
record_blocked_step "$lane" "real-runtime-lease" "${BLOCKERS[-1]}"
record_blocked_step "$lane" "real-runtime-lease" "$REAL_RUNTIME_LEASE_ERROR"
continue
fi
helper=$(find_real_runtime_helper || true)

View File

@@ -33,7 +33,7 @@ function runBash(args: string[]) {
}
// Extracts the artifact directory from verifier stdout. The verifier prints
// the artifact root as a line of the form "artifacts: <path>".
function parseArtifactDir(stdout: string): string {
  const match = stdout.match(/^artifacts: (.+)$/m);
  // Keep the full stdout in the failure message so a missing marker is easy to debug.
  assert.ok(match, `expected "artifacts: <dir>" line in stdout, got:\n${stdout}`);
  return match[1] ?? '';
}
@@ -42,10 +42,17 @@ function readSummaryJson(artifactDir: string) {
return JSON.parse(fs.readFileSync(path.join(artifactDir, 'summary.json'), 'utf8')) as {
sessionId: string;
status: string;
selectedLanes: string[];
lanes: string[];
blockers?: string[];
artifactDir: string;
pathSelectionMode?: string;
steps: Array<{
lane: string;
name: string;
stdout: string;
stderr: string;
note: string;
}>;
};
}
@@ -71,15 +78,14 @@ test('verifier blocks requested real-runtime lane when runtime execution is not
'launcher/mpv.ts',
]);
assert.notEqual(result.status, 0, result.stdout);
assert.match(result.stdout, /^result=blocked$/m);
assert.equal(result.status, 0, result.stdout);
const summary = readSummaryJson(artifactDir);
assert.equal(summary.status, 'blocked');
assert.deepEqual(summary.selectedLanes, ['real-runtime']);
assert.deepEqual(summary.lanes, ['real-runtime']);
assert.ok(summary.sessionId.length > 0);
assert.ok(summary.blockers?.some((entry) => entry.includes('--allow-real-runtime')));
assert.equal(fs.existsSync(path.join(artifactDir, 'reports', 'summary.json')), true);
assert.equal(fs.existsSync(path.join(artifactDir, 'summary.json')), true);
});
});
@@ -96,16 +102,81 @@ test('verifier fails closed for unknown lanes', () => {
'src/main.ts',
]);
assert.notEqual(result.status, 0, result.stdout);
assert.match(result.stdout, /^result=failed$/m);
assert.equal(result.status, 0, result.stdout);
const summary = readSummaryJson(artifactDir);
assert.equal(summary.status, 'failed');
assert.deepEqual(summary.selectedLanes, ['not-a-lane']);
assert.equal(summary.status, 'blocked');
assert.deepEqual(summary.lanes, ['not-a-lane']);
assert.ok(summary.blockers?.some((entry) => entry.includes('unknown lane')));
});
});
test('verifier keeps non-passing step artifacts distinct across lanes', () => {
  withTempDir((root) => {
    // Run two lanes whose steps both record non-passing artifacts, so their
    // log files would collide if they were keyed by step name alone.
    const artifactDir = path.join(root, 'artifacts');
    const args = [
      verifyScript,
      '--dry-run',
      '--artifact-dir',
      artifactDir,
      '--lane',
      'docs',
      '--lane',
      'not-a-lane',
      'src/main.ts',
    ];
    const result = runBash(args);
    assert.equal(result.status, 0, result.stdout);

    const summary = readSummaryJson(artifactDir);
    const findStep = (lane: string, name: string) =>
      summary.steps.find((step) => step.lane === lane && step.name === name);
    const docsStep = findStep('docs', 'docs-kb');
    const unknownStep = findStep('not-a-lane', 'unknown-lane');
    assert.ok(docsStep);
    assert.ok(unknownStep);

    // Each lane's step must own a distinct stdout log, and both logs must exist.
    assert.notEqual(docsStep?.stdout, unknownStep?.stdout);
    assert.equal(fs.existsSync(path.join(artifactDir, docsStep!.stdout)), true);
    assert.equal(fs.existsSync(path.join(artifactDir, unknownStep!.stdout)), true);
  });
});
test('verifier records the real-runtime lease blocker once', () => {
  withTempDir((root) => {
    const artifactDir = path.join(root, 'artifacts');
    // Simulate another session already holding the exclusive real-runtime lease.
    const leaseDir = path.join(
      repoRoot,
      '.tmp',
      'skill-verification',
      'locks',
      'exclusive-real-runtime',
    );
    fs.mkdirSync(leaseDir, { recursive: true });
    fs.writeFileSync(path.join(leaseDir, 'session_id'), 'other-session');
    try {
      const args = [
        verifyScript,
        '--dry-run',
        '--artifact-dir',
        artifactDir,
        '--allow-real-runtime',
        '--lane',
        'real-runtime',
        'launcher/mpv.ts',
      ];
      const result = runBash(args);
      assert.equal(result.status, 0, result.stdout);
      // Exactly one blocker entry: the held lease must not be reported twice.
      const { blockers } = readSummaryJson(artifactDir);
      assert.deepEqual(blockers, ['real-runtime lease already held by other-session']);
    } finally {
      // Always remove the fake lease so later tests are unaffected.
      fs.rmSync(leaseDir, { recursive: true, force: true });
    }
  });
});
test('verifier allocates unique session ids and artifact roots by default', () => {
const first = runBash([verifyScript, '--dry-run', '--lane', 'core', 'src/main.ts']);
const second = runBash([verifyScript, '--dry-run', '--lane', 'core', 'src/main.ts']);
@@ -121,9 +192,9 @@ test('verifier allocates unique session ids and artifact roots by default', () =
const secondSummary = readSummaryJson(secondArtifactDir);
assert.notEqual(firstSummary.sessionId, secondSummary.sessionId);
assert.notEqual(firstSummary.artifactDir, secondSummary.artifactDir);
assert.equal(firstSummary.pathSelectionMode, 'explicit');
assert.equal(secondSummary.pathSelectionMode, 'explicit');
assert.notEqual(firstArtifactDir, secondArtifactDir);
assert.equal(firstSummary.pathSelectionMode, 'explicit-lanes');
assert.equal(secondSummary.pathSelectionMode, 'explicit-lanes');
} finally {
fs.rmSync(firstArtifactDir, { recursive: true, force: true });
fs.rmSync(secondArtifactDir, { recursive: true, force: true });

View File

@@ -787,6 +787,208 @@ test('getTrendsDashboard keeps local-midnight session buckets separate', () => {
}
});
test('getTrendsDashboard month grouping spans every touched calendar month and keeps progress monthly', () => {
const dbPath = makeDbPath();
const db = new Database(dbPath);
// Freeze "now" at 2026-03-01 12:00 local time (month arg is 0-based) so the
// 30d range reaches back across the February/March boundary.
const RealDate = Date;
class MockDate extends Date {
constructor(...args: ConstructorParameters<typeof Date>) {
super(...(args.length === 0 ? [new RealDate(2026, 2, 1, 12, 0, 0).getTime()] : args));
}
static override now(): number {
return new RealDate(2026, 2, 1, 12, 0, 0).getTime();
}
}
try {
globalThis.Date = MockDate as DateConstructor;
ensureSchema(db);
const stmts = createTrackerPreparedStatements(db);
// One video watched in February and one in March, both linked to the same
// anime so episode grouping must count distinct videos per month.
const febVideoId = getOrCreateVideoRecord(db, 'local:/tmp/feb-trends.mkv', {
canonicalTitle: 'Monthly Trends',
sourcePath: '/tmp/feb-trends.mkv',
sourceUrl: null,
sourceType: SOURCE_TYPE_LOCAL,
});
const marVideoId = getOrCreateVideoRecord(db, 'local:/tmp/mar-trends.mkv', {
canonicalTitle: 'Monthly Trends',
sourcePath: '/tmp/mar-trends.mkv',
sourceUrl: null,
sourceType: SOURCE_TYPE_LOCAL,
});
const animeId = getOrCreateAnimeRecord(db, {
parsedTitle: 'Monthly Trends',
canonicalTitle: 'Monthly Trends',
anilistId: null,
titleRomaji: null,
titleEnglish: null,
titleNative: null,
metadataJson: null,
});
linkVideoToAnimeRecord(db, febVideoId, {
animeId,
parsedBasename: 'feb-trends.mkv',
parsedTitle: 'Monthly Trends',
parsedSeason: 1,
parsedEpisode: 1,
parserSource: 'test',
parserConfidence: 1,
parseMetadataJson: null,
});
linkVideoToAnimeRecord(db, marVideoId, {
animeId,
parsedBasename: 'mar-trends.mkv',
parsedTitle: 'Monthly Trends',
parsedSeason: 1,
parsedEpisode: 2,
parserSource: 'test',
parserConfidence: 1,
parseMetadataJson: null,
});
// Session start times: 2026-02-15 20:00 and 2026-03-01 09:00 local.
const febStartedAtMs = new RealDate(2026, 1, 15, 20, 0, 0).getTime();
const marStartedAtMs = new RealDate(2026, 2, 1, 9, 0, 0).getTime();
const febSessionId = startSessionRecord(db, febVideoId, febStartedAtMs).sessionId;
const marSessionId = startSessionRecord(db, marVideoId, marStartedAtMs).sessionId;
// Seed telemetry and finalize each session with distinct metric values
// (tokens/cards/lookups) so the two monthly buckets are distinguishable.
for (const [sessionId, startedAtMs, tokensSeen, cardsMined, yomitanLookupCount] of [
[febSessionId, febStartedAtMs, 100, 2, 3],
[marSessionId, marStartedAtMs, 120, 4, 5],
] as const) {
stmts.telemetryInsertStmt.run(
sessionId,
startedAtMs + 60_000,
30 * 60_000,
30 * 60_000,
4,
tokensSeen,
cardsMined,
yomitanLookupCount,
yomitanLookupCount,
yomitanLookupCount,
0,
0,
0,
0,
startedAtMs + 60_000,
startedAtMs + 60_000,
);
// Finalize the session row (status = 2 — presumably "ended"; matches other
// fixtures in this file) with the same totals the telemetry reported.
db.prepare(
`
UPDATE imm_sessions
SET
ended_at_ms = ?,
status = 2,
total_watched_ms = ?,
active_watched_ms = ?,
lines_seen = ?,
tokens_seen = ?,
cards_mined = ?,
lookup_count = ?,
lookup_hits = ?,
yomitan_lookup_count = ?,
LAST_UPDATE_DATE = ?
WHERE session_id = ?
`,
).run(
startedAtMs + 60_000,
30 * 60_000,
30 * 60_000,
4,
tokensSeen,
cardsMined,
yomitanLookupCount,
yomitanLookupCount,
yomitanLookupCount,
startedAtMs + 60_000,
sessionId,
);
}
// Rollup fixtures: daily rows keyed by epoch day, monthly rows keyed by
// YYYYMM (202602 / 202603) so month grouping reads one bucket per month.
const insertDailyRollup = db.prepare(
`
INSERT INTO imm_daily_rollups (
rollup_day, video_id, total_sessions, total_active_min, total_lines_seen,
total_tokens_seen, total_cards, CREATED_DATE, LAST_UPDATE_DATE
) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?)
`,
);
const insertMonthlyRollup = db.prepare(
`
INSERT INTO imm_monthly_rollups (
rollup_month, video_id, total_sessions, total_active_min, total_lines_seen,
total_tokens_seen, total_cards, CREATED_DATE, LAST_UPDATE_DATE
) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?)
`,
);
const febEpochDay = Math.floor(febStartedAtMs / 86_400_000);
const marEpochDay = Math.floor(marStartedAtMs / 86_400_000);
insertDailyRollup.run(febEpochDay, febVideoId, 1, 30, 4, 100, 2, febStartedAtMs, febStartedAtMs);
insertDailyRollup.run(marEpochDay, marVideoId, 1, 30, 4, 120, 4, marStartedAtMs, marStartedAtMs);
insertMonthlyRollup.run(202602, febVideoId, 1, 30, 4, 100, 2, febStartedAtMs, febStartedAtMs);
insertMonthlyRollup.run(202603, marVideoId, 1, 30, 4, 120, 4, marStartedAtMs, marStartedAtMs);
// One new word first seen in each month, so the new-words progress series
// must also emit one point per calendar month.
db.prepare(
`
INSERT INTO imm_words (
headword, word, reading, part_of_speech, pos1, pos2, pos3, first_seen, last_seen, frequency
) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
`,
).run(
'二月',
'二月',
'にがつ',
'noun',
'名詞',
'',
'',
Math.floor(febStartedAtMs / 1000),
Math.floor(febStartedAtMs / 1000),
1,
);
db.prepare(
`
INSERT INTO imm_words (
headword, word, reading, part_of_speech, pos1, pos2, pos3, first_seen, last_seen, frequency
) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
`,
).run(
'三月',
'三月',
'さんがつ',
'noun',
'名詞',
'',
'',
Math.floor(marStartedAtMs / 1000),
Math.floor(marStartedAtMs / 1000),
1,
);
const dashboard = getTrendsDashboard(db, '30d', 'month');
// Exactly one watch-time bucket per touched calendar month...
assert.equal(dashboard.activity.watchTime.length, 2);
// ...and every progress series must share those monthly bucket labels
// instead of silently falling back to daily grouping.
assert.deepEqual(
dashboard.progress.newWords.map((point) => point.label),
dashboard.activity.watchTime.map((point) => point.label),
);
assert.deepEqual(
dashboard.progress.episodes.map((point) => point.label),
dashboard.activity.watchTime.map((point) => point.label),
);
assert.deepEqual(
dashboard.progress.lookups.map((point) => point.label),
dashboard.activity.watchTime.map((point) => point.label),
);
} finally {
// Restore the real Date and clean up the temp database even on failure.
globalThis.Date = RealDate;
db.close();
cleanupDbPath(dbPath);
}
});
test('getQueryHints reads all-time totals from lifetime summary', () => {
const dbPath = makeDbPath();
const db = new Database(dbPath);
@@ -857,6 +1059,72 @@ test('getQueryHints reads all-time totals from lifetime summary', () => {
}
});
test('getQueryHints computes weekly new-word cutoff from calendar midnights', () => {
  const dbPath = makeDbPath();
  const db = new Database(dbPath);
  const RealDate = Date;
  // Freeze "now" at 2026-03-15 12:00 local time (month arg is 0-based), so a
  // calendar-midnight week cutoff falls at local midnight of 2026-03-08.
  const frozenNowMs = new RealDate(2026, 2, 15, 12, 0, 0).getTime();
  class MockDate extends Date {
    constructor(...args: ConstructorParameters<typeof Date>) {
      super(...(args.length === 0 ? [frozenNowMs] : args));
    }
    static override now(): number {
      return frozenNowMs;
    }
  }
  try {
    globalThis.Date = MockDate as DateConstructor;
    ensureSchema(db);
    const insertWord = db.prepare(
      `
INSERT INTO imm_words (
headword, word, reading, part_of_speech, pos1, pos2, pos3, first_seen, last_seen, frequency
) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
`,
    );
    // One word just before the week boundary, one just after it.
    const beforeBoundarySec = Math.floor(new RealDate(2026, 2, 7, 23, 30, 0).getTime() / 1000);
    const afterBoundarySec = Math.floor(new RealDate(2026, 2, 8, 0, 30, 0).getTime() / 1000);
    const words: Array<[string, string, string, number]> = [
      ['境界前', '境界前', 'きょうかいまえ', beforeBoundarySec],
      ['境界後', '境界後', 'きょうかいご', afterBoundarySec],
    ];
    for (const [headword, word, reading, firstSeenSec] of words) {
      insertWord.run(
        headword,
        word,
        reading,
        'noun',
        '名詞',
        '',
        '',
        firstSeenSec,
        firstSeenSec,
        1,
      );
    }
    // Only the word first seen after the calendar-midnight boundary counts.
    const hints = getQueryHints(db);
    assert.equal(hints.newWordsThisWeek, 1);
  } finally {
    globalThis.Date = RealDate;
    db.close();
    cleanupDbPath(dbPath);
  }
});
test('getQueryHints counts new words by distinct headword first-seen time', () => {
const dbPath = makeDbPath();
const db = new Database(dbPath);

View File

@@ -8,6 +8,7 @@ import {
pruneRawRetention,
pruneRollupRetention,
runOptimizeMaintenance,
toMonthKey,
} from './maintenance';
import { ensureSchema } from './storage';
@@ -81,6 +82,12 @@ test('pruneRawRetention uses session retention separately from telemetry retenti
}
});
test('toMonthKey floors negative timestamps into the prior UTC month', () => {
  // Pre-epoch timestamps must floor into December 1969 (196912) rather than
  // truncating toward January 1970.
  const expectedByTimestamp: Array<[number, number]> = [
    [-1, 196912],
    [-86_400_000, 196912],
    [0, 197001],
  ];
  for (const [timestampMs, expectedMonthKey] of expectedByTimestamp) {
    assert.equal(toMonthKey(timestampMs), expectedMonthKey);
  }
});
test('raw retention keeps rollups and rollup retention prunes them separately', () => {
const dbPath = makeDbPath();
const db = new Database(dbPath);

View File

@@ -30,7 +30,7 @@ interface RawRetentionResult {
}
export function toMonthKey(timestampMs: number): number {
const epochDay = Number(BigInt(Math.trunc(timestampMs)) / BigInt(DAILY_MS));
const epochDay = Math.floor(timestampMs / DAILY_MS);
const z = epochDay + 719468;
const era = Math.floor(z / 146097);
const doe = z - era * 146097;

View File

@@ -131,7 +131,8 @@ export function getSessionWordsByLine(
function getNewWordCounts(db: DatabaseSync): { newWordsToday: number; newWordsThisWeek: number } {
const now = new Date();
const todayStartSec = new Date(now.getFullYear(), now.getMonth(), now.getDate()).getTime() / 1000;
const weekAgoSec = todayStartSec - 7 * 86_400;
const weekAgoSec =
new Date(now.getFullYear(), now.getMonth(), now.getDate() - 7).getTime() / 1000;
const row = db
.prepare(

View File

@@ -83,7 +83,13 @@ function getTrendMonthlyLimit(range: TrendRange): number {
if (range === 'all') {
return 120;
}
return Math.max(1, Math.ceil(TREND_DAY_LIMITS[range] / 30));
const now = new Date();
const cutoff = new Date(
now.getFullYear(),
now.getMonth(),
now.getDate() - (TREND_DAY_LIMITS[range] - 1),
);
return Math.max(1, (now.getFullYear() - cutoff.getFullYear()) * 12 + now.getMonth() - cutoff.getMonth() + 1);
}
function getTrendCutoffMs(range: TrendRange): number | null {
@@ -122,6 +128,11 @@ function getLocalDateForEpochDay(epochDay: number): Date {
return new Date(utcDate.getTime() + utcDate.getTimezoneOffset() * 60_000);
}
/**
 * Converts a unix-ms timestamp to a YYYYMM month key in the local timezone
 * (e.g. March 2026 -> 202603), matching the monthly-rollup key format.
 */
function getLocalMonthKey(timestampMs: number): number {
  const localDate = new Date(timestampMs);
  // getMonth() is 0-based, so add 1 to produce a human-style month number.
  return localDate.getFullYear() * 100 + (localDate.getMonth() + 1);
}
// Word volume for a session is the token count reported in its metric row.
function getTrendSessionWordCount(session: Pick<TrendSessionMetricRow, 'tokensSeen'>): number {
  const { tokensSeen } = session;
  return tokensSeen;
}
@@ -218,6 +229,20 @@ function buildSessionSeriesByDay(
.map(([epochDay, value]) => ({ label: dayLabel(epochDay), value }));
}
/**
 * Sums a per-session metric into local-calendar-month buckets and returns the
 * buckets as chart points in ascending month order.
 */
function buildSessionSeriesByMonth(
  sessions: TrendSessionMetricRow[],
  getValue: (session: TrendSessionMetricRow) => number,
): TrendChartPoint[] {
  const totalsByMonth = sessions.reduce((totals, session) => {
    const monthKey = getLocalMonthKey(session.startedAtMs);
    totals.set(monthKey, (totals.get(monthKey) ?? 0) + getValue(session));
    return totals;
  }, new Map<number, number>());
  const orderedMonthKeys = [...totalsByMonth.keys()].sort((left, right) => left - right);
  return orderedMonthKeys.map((monthKey) => ({
    label: makeTrendLabel(monthKey),
    value: totalsByMonth.get(monthKey) ?? 0,
  }));
}
function buildLookupsPerHundredWords(sessions: TrendSessionMetricRow[]): TrendChartPoint[] {
const lookupsByDay = new Map<number, number>();
const wordsByDay = new Map<number, number>();
@@ -441,6 +466,26 @@ function buildEpisodesPerDayFromDailyRollups(
}));
}
/**
 * Counts distinct episodes (video ids) per monthly rollup bucket. Rollup rows
 * without a video id cannot identify an episode and are skipped.
 */
function buildEpisodesPerMonthFromRollups(rollups: ImmersionSessionRollupRow[]): TrendChartPoint[] {
  const videoIdsByMonth = new Map<number, Set<number>>();
  for (const { videoId, rollupDayOrMonth } of rollups) {
    if (videoId === null) {
      continue;
    }
    let bucket = videoIdsByMonth.get(rollupDayOrMonth);
    if (bucket === undefined) {
      bucket = new Set<number>();
      videoIdsByMonth.set(rollupDayOrMonth, bucket);
    }
    bucket.add(videoId);
  }
  return [...videoIdsByMonth.entries()]
    .sort(([leftKey], [rightKey]) => leftKey - rightKey)
    .map(([monthKey, videoIds]) => ({
      label: makeTrendLabel(monthKey),
      value: videoIds.size,
    }));
}
function getTrendSessionMetrics(
db: DatabaseSync,
cutoffMs: number | null,
@@ -494,6 +539,32 @@ function buildNewWordsPerDay(db: DatabaseSync, cutoffMs: number | null): TrendCh
}));
}
/**
 * Counts first-seen words per local calendar month directly in SQL, optionally
 * restricted to words whose first_seen is at or after the cutoff.
 */
function buildNewWordsPerMonth(db: DatabaseSync, cutoffMs: number | null): TrendChartPoint[] {
  // Only append the cutoff predicate when a cutoff was supplied, so the
  // prepared statement's placeholder count matches the bound parameters.
  const whereClause = cutoffMs === null ? '' : 'AND first_seen >= ?';
  const prepared = db.prepare(`
SELECT
CAST(strftime('%Y%m', first_seen, 'unixepoch', 'localtime') AS INTEGER) AS monthKey,
COUNT(*) AS wordCount
FROM imm_words
WHERE first_seen IS NOT NULL
${whereClause}
GROUP BY monthKey
ORDER BY monthKey ASC
`);
  // first_seen is stored in unix seconds; convert the ms cutoff before binding.
  const boundParams: number[] = cutoffMs === null ? [] : [Math.floor(cutoffMs / 1000)];
  const rows = prepared.all(...boundParams) as Array<{
    monthKey: number;
    wordCount: number;
  }>;
  return rows.map(({ monthKey, wordCount }) => ({
    label: makeTrendLabel(monthKey),
    value: wordCount,
  }));
}
export function getTrendsDashboard(
db: DatabaseSync,
range: TrendRange = '30d',
@@ -502,10 +573,11 @@ export function getTrendsDashboard(
const dayLimit = getTrendDayLimit(range);
const monthlyLimit = getTrendMonthlyLimit(range);
const cutoffMs = getTrendCutoffMs(range);
const chartRollups =
groupBy === 'month' ? getMonthlyRollups(db, monthlyLimit) : getDailyRollups(db, dayLimit);
const useMonthlyBuckets = groupBy === 'month';
const dailyRollups = getDailyRollups(db, dayLimit);
const monthlyRollups = getMonthlyRollups(db, monthlyLimit);
const chartRollups = useMonthlyBuckets ? monthlyRollups : dailyRollups;
const sessions = getTrendSessionMetrics(db, cutoffMs);
const titlesByVideoId = getVideoAnimeTitleMap(
db,
@@ -545,11 +617,19 @@ export function getTrendsDashboard(
watchTime: accumulatePoints(activity.watchTime),
sessions: accumulatePoints(activity.sessions),
words: accumulatePoints(activity.words),
newWords: accumulatePoints(buildNewWordsPerDay(db, cutoffMs)),
newWords: accumulatePoints(
useMonthlyBuckets ? buildNewWordsPerMonth(db, cutoffMs) : buildNewWordsPerDay(db, cutoffMs),
),
cards: accumulatePoints(activity.cards),
episodes: accumulatePoints(buildEpisodesPerDayFromDailyRollups(dailyRollups)),
episodes: accumulatePoints(
useMonthlyBuckets
? buildEpisodesPerMonthFromRollups(monthlyRollups)
: buildEpisodesPerDayFromDailyRollups(dailyRollups),
),
lookups: accumulatePoints(
buildSessionSeriesByDay(sessions, (session) => session.yomitanLookupCount),
useMonthlyBuckets
? buildSessionSeriesByMonth(sessions, (session) => session.yomitanLookupCount)
: buildSessionSeriesByDay(sessions, (session) => session.yomitanLookupCount),
),
},
ratios: {

View File

@@ -5,6 +5,8 @@ import { createMainBootServices } from './services';
test('createMainBootServices builds boot-phase service bundle', () => {
const calls: string[] = [];
let setPathValue: string | null = null;
const appOnCalls: string[] = [];
let secondInstanceHandlerRegistered = false;
const services = createMainBootServices({
platform: 'linux',
@@ -27,12 +29,17 @@ test('createMainBootServices builds boot-phase service bundle', () => {
setPathValue = value;
},
quit: () => {},
on: () => ({}),
on: (event) => {
appOnCalls.push(event);
return {};
},
whenReady: async () => {},
},
shouldBypassSingleInstanceLock: () => false,
requestSingleInstanceLockEarly: () => true,
registerSecondInstanceHandlerEarly: () => {},
registerSecondInstanceHandlerEarly: () => {
secondInstanceHandlerRegistered = true;
},
onConfigStartupParseError: () => {
throw new Error('unexpected parse failure');
},
@@ -78,6 +85,10 @@ test('createMainBootServices builds boot-phase service bundle', () => {
mpvSocketPath: '/tmp/subminer.sock',
texthookerPort: 5174,
});
assert.equal(services.appLifecycleApp.on('ready', () => {}), services.appLifecycleApp);
assert.equal(services.appLifecycleApp.on('second-instance', () => {}), services.appLifecycleApp);
assert.deepEqual(appOnCalls, ['ready']);
assert.equal(secondInstanceHandlerRegistered, true);
assert.deepEqual(calls, ['mkdir:/tmp/subminer-config']);
assert.equal(setPathValue, '/tmp/subminer-config');
});

View File

@@ -231,10 +231,10 @@ export function createMainBootServices<
params.registerSecondInstanceHandlerEarly(
listener as (_event: unknown, argv: string[]) => void,
);
return params.app;
return appLifecycleApp;
}
params.app.on(event, listener);
return params.app;
return appLifecycleApp;
},
whenReady: () => params.app.whenReady(),
} as TAppLifecycleApp;

View File

@@ -48,9 +48,14 @@ test('buildDictionaryZip writes a valid stored zip without fs.writeFileSync', ()
const termEntries: CharacterDictionaryTermEntry[] = [
['アルファ', 'あるふぁ', '', '', 0, ['Alpha entry'], 0, 'name'],
];
const originalWriteFileSync = fs.writeFileSync;
const originalBufferConcat = Buffer.concat;
try {
fs.writeFileSync = ((..._args: unknown[]) => {
throw new Error('buildDictionaryZip should not call fs.writeFileSync');
}) as typeof fs.writeFileSync;
Buffer.concat = ((...args: Parameters<typeof Buffer.concat>) => {
throw new Error(`buildDictionaryZip should not Buffer.concat the full archive (${args[0].length} chunks)`);
}) as typeof Buffer.concat;
@@ -92,6 +97,7 @@ test('buildDictionaryZip writes a valid stored zip without fs.writeFileSync', ()
assert.equal(termBank[0]?.[0], 'アルファ');
assert.deepEqual(entries.get('images/alpha.bin'), Buffer.from([1, 2, 3]));
} finally {
fs.writeFileSync = originalWriteFileSync;
Buffer.concat = originalBufferConcat;
cleanupDir(tempDir);
}