feat: add app-owned YouTube subtitle flow with absPlayer-style parsing (#31)

* fix: harden preload argv parsing for popup windows

* fix: align youtube playback with shared overlay startup

* fix: unwrap mpv youtube streams for anki media mining

* docs: update docs for youtube subtitle and mining flow

* refactor: unify cli and runtime wiring for startup and youtube flow

* feat: update subtitle sidebar overlay behavior

* chore: add shared log-file source for diagnostics

* fix(ci): add changelog fragment for immersion changes

* fix: address CodeRabbit review feedback

* fix: persist canonical title from youtube metadata

* style: format stats library tab

* fix: address latest review feedback

* style: format stats library files

* test: stub launcher youtube deps in CI

* test: isolate launcher youtube flow deps

* test: stub launcher youtube deps in failing case

* test: force x11 backend in launcher ci harness

* test: address latest review feedback

* fix(launcher): preserve user YouTube ytdl raw options

* docs(backlog): update task tracking notes

* fix(immersion): special-case youtube media paths in runtime and tracking

* feat(stats): improve YouTube media metadata and picker key handling

* fix(ci): format stats media library hook

* fix: address latest CodeRabbit review items

* docs: update youtube release notes and docs

* feat: auto-load youtube subtitles before manual picker

* fix: restore app-owned youtube subtitle flow

* docs: update youtube playback docs and config copy

* refactor: remove legacy youtube launcher mode plumbing

* fix: refine youtube subtitle startup binding

* docs: clarify youtube subtitle startup behavior

* fix: address PR #31 latest review follow-ups

* fix: address PR #31 follow-up review comments

* test: harden youtube picker test harness

* update backlog

* fix: add timeout to youtube metadata probe

* docs: refresh youtube and stats docs

* update backlog

* update backlog

* chore: release v0.9.0
This commit is contained in:
2026-03-24 00:01:24 -07:00
committed by GitHub
parent c17f0a4080
commit 5feed360ca
219 changed files with 12778 additions and 1052 deletions

123
src/shared/log-files.ts Normal file
View File

@@ -0,0 +1,123 @@
import fs from 'node:fs';
import os from 'node:os';
import path from 'node:path';
/** Categories of log files the app writes; each kind gets its own dated file. */
export type LogKind = 'app' | 'launcher' | 'mpv';
/** Default age (in days) after which `.log` files are deleted by pruning. */
export const DEFAULT_LOG_RETENTION_DAYS = 7;
/** Default per-file size cap (10 MiB); older content is trimmed from the front. */
export const DEFAULT_LOG_MAX_BYTES = 10 * 1024 * 1024;
// Prepended to a log file after its head has been cut off to fit the size cap.
const TRUNCATED_MARKER = '[truncated older log content]\n';
// "dir:date:retention" keys already pruned during this process lifetime,
// so each directory is pruned at most once per day per retention setting.
const prunedDirectories = new Set<string>();
/**
 * Resolves the per-user base directory for SubMiner state.
 *
 * Windows: `<appData>\SubMiner`, where `appData` is `options.appDataDir`
 * (when non-blank) or `<home>\AppData\Roaming`.
 * All other platforms: `<home>/.config/SubMiner`.
 *
 * `platform` and `homeDir` default to the current process values, so tests
 * can inject both without touching the real environment.
 */
export function resolveLogBaseDir(options?: {
  platform?: NodeJS.Platform;
  homeDir?: string;
  appDataDir?: string;
}): string {
  const platform = options?.platform ?? process.platform;
  const home = options?.homeDir ?? os.homedir();
  if (platform === 'win32') {
    // A blank/whitespace-only appDataDir falls back to the conventional location.
    const appData = options?.appDataDir?.trim() || path.join(home, 'AppData', 'Roaming');
    return path.join(appData, 'SubMiner');
  }
  return path.join(home, '.config', 'SubMiner');
}
/**
 * Builds the default path for a dated log file, e.g.
 * `<base>/logs/app-2026-03-24.log`.
 *
 * The date stamp is the UTC `YYYY-MM-DD` portion of `options.now`
 * (default: current time), so a long-running process naturally rolls
 * over to a new file at UTC midnight.
 */
export function resolveDefaultLogFilePath(
  kind: LogKind = 'app',
  options?: {
    platform?: NodeJS.Platform;
    homeDir?: string;
    appDataDir?: string;
    now?: Date;
  },
): string {
  const stamp = (options?.now ?? new Date()).toISOString().slice(0, 10);
  const fileName = `${kind}-${stamp}.log`;
  return path.join(resolveLogBaseDir(options), 'logs', fileName);
}
/**
 * Deletes stale `.log` files from `logsDir`.
 *
 * A file is removed when its mtime is older than `retentionDays` days
 * relative to `options.now` (default: current time). Directories,
 * non-`.log` entries, and unreadable entries are left alone; a missing
 * or unreadable directory is a silent no-op. A non-positive or
 * non-finite retention disables pruning entirely.
 */
export function pruneLogFiles(
  logsDir: string,
  options?: {
    retentionDays?: number;
    now?: Date;
  },
): void {
  const keepDays = options?.retentionDays ?? DEFAULT_LOG_RETENTION_DAYS;
  if (!Number.isFinite(keepDays) || keepDays <= 0) return;

  let names: string[];
  try {
    names = fs.readdirSync(logsDir);
  } catch {
    return; // directory missing or unreadable — nothing to prune
  }

  const nowMs = (options?.now ?? new Date()).getTime();
  const cutoffMs = nowMs - keepDays * 24 * 60 * 60 * 1000;

  for (const name of names) {
    if (!name.endsWith('.log')) continue;
    const fullPath = path.join(logsDir, name);
    let info: fs.Stats;
    try {
      info = fs.statSync(fullPath);
    } catch {
      continue; // entry vanished or is unreadable — skip it
    }
    if (!info.isFile() || info.mtimeMs >= cutoffMs) continue;
    try {
      fs.rmSync(fullPath, { force: true });
    } catch {
      // ignore cleanup failures
    }
  }
}
/**
 * Prunes the directory containing `logPath`, at most once per calendar day
 * (UTC) per retention setting for the lifetime of this process.
 */
function maybePruneLogDirectory(logPath: string, retentionDays: number): void {
  const dir = path.dirname(logPath);
  const today = new Date().toISOString().slice(0, 10);
  const cacheKey = `${dir}:${today}:${retentionDays}`;
  if (!prunedDirectories.has(cacheKey)) {
    pruneLogFiles(dir, { retentionDays });
    prunedDirectories.add(cacheKey);
  }
}
/**
 * Rewrites `logPath` in place so it fits within `maxBytes`, keeping the
 * newest content: the head of the file is discarded and replaced with a
 * truncation marker. A missing/unreadable file, a file already within
 * the cap, or a non-positive/non-finite cap is a no-op; any failure
 * while rewriting is swallowed.
 */
function trimLogFileToMaxBytes(logPath: string, maxBytes: number): void {
  if (!Number.isFinite(maxBytes) || maxBytes <= 0) return;

  let size: number;
  try {
    size = fs.statSync(logPath).size;
  } catch {
    return; // nothing to trim
  }
  if (size <= maxBytes) return;

  try {
    const marker = Buffer.from('[truncated older log content]\n', 'utf8');
    const contents = fs.readFileSync(logPath);
    // Budget the tail so marker + tail stays within the cap.
    const keep = Math.max(0, maxBytes - marker.length);
    const tail =
      keep > 0 ? contents.subarray(Math.max(0, contents.length - keep)) : Buffer.alloc(0);
    fs.writeFileSync(logPath, Buffer.concat([marker, tail]));
  } catch {
    // ignore trim failures
  }
}
/**
 * Appends one line to `logPath`, creating the parent directory if needed.
 *
 * On each call it also (a) prunes stale sibling logs — throttled to once
 * per day per directory — using `retentionDays` (default
 * DEFAULT_LOG_RETENTION_DAYS), and (b) trims the file to `maxBytes`
 * (default DEFAULT_LOG_MAX_BYTES). This is a best-effort sink: every
 * failure is swallowed so logging can never break the runtime.
 */
export function appendLogLine(
  logPath: string,
  line: string,
  options?: {
    retentionDays?: number;
    maxBytes?: number;
  },
): void {
  const keepDays = options?.retentionDays ?? DEFAULT_LOG_RETENTION_DAYS;
  const sizeCap = options?.maxBytes ?? DEFAULT_LOG_MAX_BYTES;
  try {
    fs.mkdirSync(path.dirname(logPath), { recursive: true });
    maybePruneLogDirectory(logPath, keepDays);
    fs.appendFileSync(logPath, line + '\n', { encoding: 'utf8' });
    trimLogFileToMaxBytes(logPath, sizeCap);
  } catch {
    // never break runtime due to logging sink failures
  }
}