mirror of
https://github.com/ksyasuda/SubMiner.git
synced 2026-04-01 06:12:07 -07:00
feat: add app-owned YouTube subtitle flow with absPlayer-style parsing (#31)
* fix: harden preload argv parsing for popup windows * fix: align youtube playback with shared overlay startup * fix: unwrap mpv youtube streams for anki media mining * docs: update docs for youtube subtitle and mining flow * refactor: unify cli and runtime wiring for startup and youtube flow * feat: update subtitle sidebar overlay behavior * chore: add shared log-file source for diagnostics * fix(ci): add changelog fragment for immersion changes * fix: address CodeRabbit review feedback * fix: persist canonical title from youtube metadata * style: format stats library tab * fix: address latest review feedback * style: format stats library files * test: stub launcher youtube deps in CI * test: isolate launcher youtube flow deps * test: stub launcher youtube deps in failing case * test: force x11 backend in launcher ci harness * test: address latest review feedback * fix(launcher): preserve user YouTube ytdl raw options * docs(backlog): update task tracking notes * fix(immersion): special-case youtube media paths in runtime and tracking * feat(stats): improve YouTube media metadata and picker key handling * fix(ci): format stats media library hook * fix: address latest CodeRabbit review items * docs: update youtube release notes and docs * feat: auto-load youtube subtitles before manual picker * fix: restore app-owned youtube subtitle flow * docs: update youtube playback docs and config copy * refactor: remove legacy youtube launcher mode plumbing * fix: refine youtube subtitle startup binding * docs: clarify youtube subtitle startup behavior * fix: address PR #31 latest review follow-ups * fix: address PR #31 follow-up review comments * test: harden youtube picker test harness * update backlog * fix: add timeout to youtube metadata probe * docs: refresh youtube and stats docs * update backlog * update backlog * chore: release v0.9.0
This commit is contained in:
@@ -4,6 +4,7 @@ export const OVERLAY_HOSTED_MODALS = [
|
||||
'runtime-options',
|
||||
'subsync',
|
||||
'jimaku',
|
||||
'youtube-track-picker',
|
||||
'kiku',
|
||||
'controller-select',
|
||||
'controller-debug',
|
||||
@@ -18,6 +19,7 @@ export const IPC_CHANNELS = {
|
||||
openYomitanSettings: 'open-yomitan-settings',
|
||||
recordYomitanLookup: 'record-yomitan-lookup',
|
||||
quitApp: 'quit-app',
|
||||
youtubePickerResolve: 'youtube:picker-resolve',
|
||||
toggleDevTools: 'toggle-dev-tools',
|
||||
toggleOverlay: 'toggle-overlay',
|
||||
saveSubtitlePosition: 'save-subtitle-position',
|
||||
@@ -51,6 +53,7 @@ export const IPC_CHANNELS = {
|
||||
getControllerConfig: 'get-controller-config',
|
||||
getSecondarySubMode: 'get-secondary-sub-mode',
|
||||
getCurrentSecondarySub: 'get-current-secondary-sub',
|
||||
youtubePickerResolve: 'youtube:picker-resolve',
|
||||
focusMainWindow: 'focus-main-window',
|
||||
runSubsyncManual: 'subsync:run-manual',
|
||||
getAnkiConnectStatus: 'get-anki-connect-status',
|
||||
@@ -94,6 +97,8 @@ export const IPC_CHANNELS = {
|
||||
runtimeOptionsChanged: 'runtime-options:changed',
|
||||
runtimeOptionsOpen: 'runtime-options:open',
|
||||
jimakuOpen: 'jimaku:open',
|
||||
youtubePickerOpen: 'youtube:picker-open',
|
||||
youtubePickerCancel: 'youtube:picker-cancel',
|
||||
keyboardModeToggleRequested: 'keyboard-mode-toggle:requested',
|
||||
lookupWindowToggleRequested: 'lookup-window-toggle:requested',
|
||||
configHotReload: 'config:hot-reload',
|
||||
|
||||
@@ -10,6 +10,7 @@ import type {
|
||||
RuntimeOptionValue,
|
||||
SubtitlePosition,
|
||||
SubsyncManualRunRequest,
|
||||
YoutubePickerResolveRequest,
|
||||
} from '../../types';
|
||||
import { OVERLAY_HOSTED_MODALS, type OverlayHostedModal } from './contracts';
|
||||
|
||||
@@ -253,3 +254,36 @@ export function parseJimakuDownloadQuery(value: unknown): JimakuDownloadQuery |
|
||||
name: value.name,
|
||||
};
|
||||
}
|
||||
|
||||
export function parseYoutubePickerResolveRequest(value: unknown): YoutubePickerResolveRequest | null {
|
||||
if (!isObject(value)) return null;
|
||||
if (typeof value.sessionId !== 'string' || !value.sessionId.trim()) return null;
|
||||
if (value.action !== 'use-selected' && value.action !== 'continue-without-subtitles') return null;
|
||||
if (value.action === 'continue-without-subtitles') {
|
||||
if (value.primaryTrackId !== null || value.secondaryTrackId !== null) {
|
||||
return null;
|
||||
}
|
||||
return {
|
||||
sessionId: value.sessionId,
|
||||
action: 'continue-without-subtitles',
|
||||
primaryTrackId: null,
|
||||
secondaryTrackId: null,
|
||||
};
|
||||
}
|
||||
if (value.primaryTrackId !== null && value.primaryTrackId !== undefined && typeof value.primaryTrackId !== 'string') {
|
||||
return null;
|
||||
}
|
||||
if (
|
||||
value.secondaryTrackId !== null &&
|
||||
value.secondaryTrackId !== undefined &&
|
||||
typeof value.secondaryTrackId !== 'string'
|
||||
) {
|
||||
return null;
|
||||
}
|
||||
return {
|
||||
sessionId: value.sessionId,
|
||||
action: 'use-selected',
|
||||
primaryTrackId: value.primaryTrackId ?? null,
|
||||
secondaryTrackId: value.secondaryTrackId ?? null,
|
||||
};
|
||||
}
|
||||
|
||||
67
src/shared/log-files.test.ts
Normal file
67
src/shared/log-files.test.ts
Normal file
@@ -0,0 +1,67 @@
|
||||
import test from 'node:test';
|
||||
import assert from 'node:assert/strict';
|
||||
import fs from 'node:fs';
|
||||
import os from 'node:os';
|
||||
import path from 'node:path';
|
||||
import {
|
||||
appendLogLine,
|
||||
pruneLogFiles,
|
||||
resolveDefaultLogFilePath,
|
||||
} from './log-files';
|
||||
|
||||
test('resolveDefaultLogFilePath uses app prefix by default', () => {
|
||||
const now = new Date('2026-03-22T12:00:00.000Z');
|
||||
const resolved = resolveDefaultLogFilePath('app', {
|
||||
platform: 'linux',
|
||||
homeDir: '/home/tester',
|
||||
now,
|
||||
});
|
||||
|
||||
assert.equal(
|
||||
resolved,
|
||||
path.join(
|
||||
'/home/tester',
|
||||
'.config',
|
||||
'SubMiner',
|
||||
'logs',
|
||||
`app-${now.toISOString().slice(0, 10)}.log`,
|
||||
),
|
||||
);
|
||||
});
|
||||
|
||||
test('pruneLogFiles removes logs older than retention window', () => {
|
||||
const logsDir = fs.mkdtempSync(path.join(os.tmpdir(), 'subminer-log-prune-'));
|
||||
const stalePath = path.join(logsDir, 'app-old.log');
|
||||
const freshPath = path.join(logsDir, 'app-fresh.log');
|
||||
fs.writeFileSync(stalePath, 'stale\n', 'utf8');
|
||||
fs.writeFileSync(freshPath, 'fresh\n', 'utf8');
|
||||
const now = new Date('2026-03-22T12:00:00.000Z');
|
||||
fs.utimesSync(stalePath, new Date('2026-03-01T12:00:00.000Z'), new Date('2026-03-01T12:00:00.000Z'));
|
||||
fs.utimesSync(freshPath, new Date('2026-03-21T12:00:00.000Z'), new Date('2026-03-21T12:00:00.000Z'));
|
||||
|
||||
try {
|
||||
pruneLogFiles(logsDir, { retentionDays: 7, now });
|
||||
|
||||
assert.equal(fs.existsSync(stalePath), false);
|
||||
assert.equal(fs.existsSync(freshPath), true);
|
||||
} finally {
|
||||
fs.rmSync(logsDir, { recursive: true, force: true });
|
||||
}
|
||||
});
|
||||
|
||||
test('appendLogLine trims oversized logs to newest bytes', () => {
|
||||
const logsDir = fs.mkdtempSync(path.join(os.tmpdir(), 'subminer-log-trim-'));
|
||||
const logPath = path.join(logsDir, 'app.log');
|
||||
|
||||
try {
|
||||
appendLogLine(logPath, '012345678901234567890123456789', { maxBytes: 48, retentionDays: 30 });
|
||||
appendLogLine(logPath, 'abcdefghijabcdefghijabcdefghij', { maxBytes: 48, retentionDays: 30 });
|
||||
|
||||
const content = fs.readFileSync(logPath, 'utf8');
|
||||
assert.match(content, /\[truncated older log content\]/);
|
||||
assert.match(content, /abcdefghij/);
|
||||
assert.ok(Buffer.byteLength(content) <= 48);
|
||||
} finally {
|
||||
fs.rmSync(logsDir, { recursive: true, force: true });
|
||||
}
|
||||
});
|
||||
123
src/shared/log-files.ts
Normal file
123
src/shared/log-files.ts
Normal file
@@ -0,0 +1,123 @@
|
||||
import fs from 'node:fs';
|
||||
import os from 'node:os';
|
||||
import path from 'node:path';
|
||||
|
||||
export type LogKind = 'app' | 'launcher' | 'mpv';
|
||||
|
||||
export const DEFAULT_LOG_RETENTION_DAYS = 7;
|
||||
export const DEFAULT_LOG_MAX_BYTES = 10 * 1024 * 1024;
|
||||
|
||||
const TRUNCATED_MARKER = '[truncated older log content]\n';
|
||||
const prunedDirectories = new Set<string>();
|
||||
|
||||
export function resolveLogBaseDir(options?: {
|
||||
platform?: NodeJS.Platform;
|
||||
homeDir?: string;
|
||||
appDataDir?: string;
|
||||
}): string {
|
||||
const platform = options?.platform ?? process.platform;
|
||||
const homeDir = options?.homeDir ?? os.homedir();
|
||||
return platform === 'win32'
|
||||
? path.join(options?.appDataDir?.trim() || path.join(homeDir, 'AppData', 'Roaming'), 'SubMiner')
|
||||
: path.join(homeDir, '.config', 'SubMiner');
|
||||
}
|
||||
|
||||
export function resolveDefaultLogFilePath(
|
||||
kind: LogKind = 'app',
|
||||
options?: {
|
||||
platform?: NodeJS.Platform;
|
||||
homeDir?: string;
|
||||
appDataDir?: string;
|
||||
now?: Date;
|
||||
},
|
||||
): string {
|
||||
const date = (options?.now ?? new Date()).toISOString().slice(0, 10);
|
||||
return path.join(resolveLogBaseDir(options), 'logs', `${kind}-${date}.log`);
|
||||
}
|
||||
|
||||
export function pruneLogFiles(
|
||||
logsDir: string,
|
||||
options?: {
|
||||
retentionDays?: number;
|
||||
now?: Date;
|
||||
},
|
||||
): void {
|
||||
const retentionDays = options?.retentionDays ?? DEFAULT_LOG_RETENTION_DAYS;
|
||||
if (!Number.isFinite(retentionDays) || retentionDays <= 0) return;
|
||||
|
||||
let entries: string[];
|
||||
try {
|
||||
entries = fs.readdirSync(logsDir);
|
||||
} catch {
|
||||
return;
|
||||
}
|
||||
|
||||
const cutoffMs = (options?.now ?? new Date()).getTime() - retentionDays * 24 * 60 * 60 * 1000;
|
||||
for (const entry of entries) {
|
||||
const candidate = path.join(logsDir, entry);
|
||||
let stats: fs.Stats;
|
||||
try {
|
||||
stats = fs.statSync(candidate);
|
||||
} catch {
|
||||
continue;
|
||||
}
|
||||
if (!stats.isFile() || !entry.endsWith('.log') || stats.mtimeMs >= cutoffMs) continue;
|
||||
try {
|
||||
fs.rmSync(candidate, { force: true });
|
||||
} catch {
|
||||
// ignore cleanup failures
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
function maybePruneLogDirectory(logPath: string, retentionDays: number): void {
|
||||
const logsDir = path.dirname(logPath);
|
||||
const key = `${logsDir}:${new Date().toISOString().slice(0, 10)}:${retentionDays}`;
|
||||
if (prunedDirectories.has(key)) return;
|
||||
pruneLogFiles(logsDir, { retentionDays });
|
||||
prunedDirectories.add(key);
|
||||
}
|
||||
|
||||
function trimLogFileToMaxBytes(logPath: string, maxBytes: number): void {
|
||||
if (!Number.isFinite(maxBytes) || maxBytes <= 0) return;
|
||||
|
||||
let stats: fs.Stats;
|
||||
try {
|
||||
stats = fs.statSync(logPath);
|
||||
} catch {
|
||||
return;
|
||||
}
|
||||
if (stats.size <= maxBytes) return;
|
||||
|
||||
try {
|
||||
const buffer = fs.readFileSync(logPath);
|
||||
const marker = Buffer.from(TRUNCATED_MARKER, 'utf8');
|
||||
const tailBudget = Math.max(0, maxBytes - marker.length);
|
||||
const tail =
|
||||
tailBudget > 0 ? buffer.subarray(Math.max(0, buffer.length - tailBudget)) : Buffer.alloc(0);
|
||||
fs.writeFileSync(logPath, Buffer.concat([marker, tail]));
|
||||
} catch {
|
||||
// ignore trim failures
|
||||
}
|
||||
}
|
||||
|
||||
export function appendLogLine(
|
||||
logPath: string,
|
||||
line: string,
|
||||
options?: {
|
||||
retentionDays?: number;
|
||||
maxBytes?: number;
|
||||
},
|
||||
): void {
|
||||
const retentionDays = options?.retentionDays ?? DEFAULT_LOG_RETENTION_DAYS;
|
||||
const maxBytes = options?.maxBytes ?? DEFAULT_LOG_MAX_BYTES;
|
||||
|
||||
try {
|
||||
fs.mkdirSync(path.dirname(logPath), { recursive: true });
|
||||
maybePruneLogDirectory(logPath, retentionDays);
|
||||
fs.appendFileSync(logPath, `${line}\n`, { encoding: 'utf8' });
|
||||
trimLogFileToMaxBytes(logPath, maxBytes);
|
||||
} catch {
|
||||
// never break runtime due to logging sink failures
|
||||
}
|
||||
}
|
||||
Reference in New Issue
Block a user