chore: add shared log-file source for diagnostics

This commit is contained in:
2026-03-22 18:38:58 -07:00
parent 16f7b2507b
commit 8928bfdf7e
3 changed files with 188 additions and 5 deletions

View File

@@ -1,5 +0,0 @@
type: changed
area: subtitle sidebar
- Added subtitle sidebar state and behavior updates, including startup-auto-open controls and resume positioning improvements.
- Fixed subtitle prefetch and embedded overlay passthrough sync between sidebar and overlay subtitle rendering.

View File

@@ -0,0 +1,65 @@
import test from 'node:test';
import assert from 'node:assert/strict';
import fs from 'node:fs';
import os from 'node:os';
import path from 'node:path';
import {
appendLogLine,
pruneLogFiles,
resolveDefaultLogFilePath,
} from './log-files';
test('resolveDefaultLogFilePath uses app prefix by default', () => {
  // Pin the clock via the supported `now` option so the expected filename is
  // deterministic. The original computed the date with `new Date()` inside the
  // assertion, which can flake if the test straddles a UTC midnight boundary.
  const now = new Date('2026-03-22T12:00:00.000Z');
  const resolved = resolveDefaultLogFilePath('app', {
    platform: 'linux',
    homeDir: '/home/tester',
    now,
  });
  assert.equal(
    resolved,
    path.join('/home/tester', '.config', 'SubMiner', 'logs', 'app-2026-03-22.log'),
  );
});
test('pruneLogFiles removes logs older than retention window', () => {
  // Set up a temp dir with one file well past the 7-day window and one inside it.
  const logsDir = fs.mkdtempSync(path.join(os.tmpdir(), 'subminer-log-prune-'));
  const stalePath = path.join(logsDir, 'app-old.log');
  const freshPath = path.join(logsDir, 'app-fresh.log');
  const staleTime = new Date('2026-03-01T12:00:00.000Z');
  const freshTime = new Date('2026-03-21T12:00:00.000Z');
  const now = new Date('2026-03-22T12:00:00.000Z');
  fs.writeFileSync(stalePath, 'stale\n', 'utf8');
  fs.writeFileSync(freshPath, 'fresh\n', 'utf8');
  fs.utimesSync(stalePath, staleTime, staleTime);
  fs.utimesSync(freshPath, freshTime, freshTime);
  try {
    pruneLogFiles(logsDir, { retentionDays: 7, now });
    // Only the file older than the cutoff should have been deleted.
    assert.equal(fs.existsSync(stalePath), false);
    assert.equal(fs.existsSync(freshPath), true);
  } finally {
    fs.rmSync(logsDir, { recursive: true, force: true });
  }
});
test('appendLogLine trims oversized logs to newest bytes', () => {
  const logsDir = fs.mkdtempSync(path.join(os.tmpdir(), 'subminer-log-trim-'));
  const logPath = path.join(logsDir, 'app.log');
  const limits = { maxBytes: 48, retentionDays: 30 };
  try {
    // The second append pushes the file past maxBytes and forces a trim.
    appendLogLine(logPath, '012345678901234567890123456789', limits);
    appendLogLine(logPath, 'abcdefghijabcdefghijabcdefghij', limits);
    const content = fs.readFileSync(logPath, 'utf8');
    // Trimmed file starts with the marker, keeps the newest bytes, and honors the cap.
    assert.match(content, /\[truncated older log content\]/);
    assert.match(content, /abcdefghij/);
    assert.ok(Buffer.byteLength(content) <= 48);
  } finally {
    fs.rmSync(logsDir, { recursive: true, force: true });
  }
});

123
src/shared/log-files.ts Normal file
View File

@@ -0,0 +1,123 @@
import fs from 'node:fs';
import os from 'node:os';
import path from 'node:path';
// Which runtime component a log file belongs to; used as the filename prefix.
export type LogKind = 'app' | 'launcher' | 'mpv';
// Default number of days a daily log file is kept before pruning.
export const DEFAULT_LOG_RETENTION_DAYS = 7;
// Default cap on a single log file's size (10 MiB); older bytes are trimmed first.
export const DEFAULT_LOG_MAX_BYTES = 10 * 1024 * 1024;
// Line prepended to a log file when older content is dropped during a size trim.
const TRUNCATED_MARKER = '[truncated older log content]\n';
// Memo of "<dir>:<YYYY-MM-DD>:<retentionDays>" keys already pruned, so each
// directory is scanned at most once per day per retention setting.
const prunedDirectories = new Set<string>();
/**
 * Resolve the per-user SubMiner config root directory.
 *
 * On Windows this is "<AppData>/SubMiner" (an explicit non-blank `appDataDir`
 * wins over "<home>/AppData/Roaming"); everywhere else it is
 * "<home>/.config/SubMiner". `platform` and `homeDir` are injectable for tests.
 */
export function resolveLogBaseDir(options?: {
  platform?: NodeJS.Platform;
  homeDir?: string;
  appDataDir?: string;
}): string {
  const platform = options?.platform ?? process.platform;
  const homeDir = options?.homeDir ?? os.homedir();
  if (platform === 'win32') {
    // A blank/whitespace-only override falls back to the conventional Roaming dir.
    const override = options?.appDataDir?.trim();
    const appData = override ? override : path.join(homeDir, 'AppData', 'Roaming');
    return path.join(appData, 'SubMiner');
  }
  return path.join(homeDir, '.config', 'SubMiner');
}
/**
 * Build the default daily log file path for a log kind:
 * "<config root>/logs/<kind>-YYYY-MM-DD.log" (date taken from `options.now`
 * when given, otherwise the current UTC date).
 */
export function resolveDefaultLogFilePath(
  kind: LogKind = 'app',
  options?: {
    platform?: NodeJS.Platform;
    homeDir?: string;
    appDataDir?: string;
    now?: Date;
  },
): string {
  const stamp = options?.now ?? new Date();
  const day = stamp.toISOString().slice(0, 10);
  const fileName = `${kind}-${day}.log`;
  return path.join(resolveLogBaseDir(options), 'logs', fileName);
}
/**
 * Best-effort deletion of "*.log" files in `logsDir` whose mtime is before the
 * retention cutoff (`now` minus `retentionDays`). A missing/unreadable
 * directory, unstat-able entries, and failed deletions are all ignored; a
 * non-positive or non-finite retention disables pruning entirely.
 */
export function pruneLogFiles(
  logsDir: string,
  options?: {
    retentionDays?: number;
    now?: Date;
  },
): void {
  const retentionDays = options?.retentionDays ?? DEFAULT_LOG_RETENTION_DAYS;
  if (!Number.isFinite(retentionDays) || retentionDays <= 0) return;
  let names: string[];
  try {
    names = fs.readdirSync(logsDir);
  } catch {
    // Directory missing or unreadable — nothing to prune.
    return;
  }
  const referenceMs = (options?.now ?? new Date()).getTime();
  const cutoffMs = referenceMs - retentionDays * 86_400_000;
  for (const name of names) {
    if (!name.endsWith('.log')) continue;
    const fullPath = path.join(logsDir, name);
    let info: fs.Stats;
    try {
      info = fs.statSync(fullPath);
    } catch {
      continue;
    }
    if (info.isFile() && info.mtimeMs < cutoffMs) {
      try {
        fs.rmSync(fullPath, { force: true });
      } catch {
        // ignore cleanup failures
      }
    }
  }
}
/**
 * Prune a log file's directory at most once per calendar day (per retention
 * setting), memoized in `prunedDirectories`.
 */
function maybePruneLogDirectory(logPath: string, retentionDays: number): void {
  const logsDir = path.dirname(logPath);
  const day = new Date().toISOString().slice(0, 10);
  const key = `${logsDir}:${day}:${retentionDays}`;
  if (prunedDirectories.has(key)) return;
  // Evict stale memo entries for this directory (previous days / other
  // retention settings) so the set does not grow without bound in a
  // long-running process as the date rolls over.
  const dirPrefix = `${logsDir}:`;
  for (const existing of prunedDirectories) {
    if (existing.startsWith(dirPrefix)) prunedDirectories.delete(existing);
  }
  pruneLogFiles(logsDir, { retentionDays });
  prunedDirectories.add(key);
}
/**
 * If the file at `logPath` exceeds `maxBytes`, rewrite it as the truncation
 * marker followed by the newest bytes, never exceeding `maxBytes`.
 * A missing file, non-positive/non-finite cap, or any I/O failure is ignored.
 */
function trimLogFileToMaxBytes(logPath: string, maxBytes: number): void {
  if (!Number.isFinite(maxBytes) || maxBytes <= 0) return;
  let stats: fs.Stats;
  try {
    stats = fs.statSync(logPath);
  } catch {
    // File missing or unreadable — nothing to trim.
    return;
  }
  if (stats.size <= maxBytes) return;
  try {
    const buffer = fs.readFileSync(logPath);
    const marker = Buffer.from(TRUNCATED_MARKER, 'utf8');
    const tailBudget = Math.max(0, maxBytes - marker.length);
    const tail =
      tailBudget > 0 ? buffer.subarray(Math.max(0, buffer.length - tailBudget)) : Buffer.alloc(0);
    // Cap the marker itself for pathological maxBytes smaller than the marker:
    // the original wrote the full marker in that case, leaving the file ABOVE
    // maxBytes and breaking the size invariant.
    const head = marker.length <= maxBytes ? marker : marker.subarray(0, maxBytes);
    fs.writeFileSync(logPath, Buffer.concat([head, tail]));
  } catch {
    // ignore trim failures
  }
}
/**
 * Append one line (newline-terminated) to the log at `logPath`.
 *
 * Ensures the parent directory exists, runs the once-per-day retention prune,
 * appends, then caps the file at `maxBytes`. Every failure is swallowed: the
 * logging sink must never break the host application.
 */
export function appendLogLine(
  logPath: string,
  line: string,
  options?: {
    retentionDays?: number;
    maxBytes?: number;
  },
): void {
  const retentionDays = options?.retentionDays ?? DEFAULT_LOG_RETENTION_DAYS;
  const maxBytes = options?.maxBytes ?? DEFAULT_LOG_MAX_BYTES;
  try {
    const logsDir = path.dirname(logPath);
    fs.mkdirSync(logsDir, { recursive: true });
    maybePruneLogDirectory(logPath, retentionDays);
    fs.appendFileSync(logPath, line + '\n', { encoding: 'utf8' });
    trimLogFileToMaxBytes(logPath, maxBytes);
  } catch {
    // never break runtime due to logging sink failures
  }
}