mirror of
https://github.com/ksyasuda/SubMiner.git
synced 2026-03-30 06:12:06 -07:00
refactor(main): extract remaining inline runtime logic from main
This commit is contained in:
@@ -1,9 +1,10 @@
|
||||
---
|
||||
id: TASK-238.6
|
||||
title: Extract remaining inline runtime logic and composer gaps from src/main.ts
|
||||
status: To Do
|
||||
status: Done
|
||||
assignee: []
|
||||
created_date: '2026-03-27 00:00'
|
||||
updated_date: '2026-03-27 19:24'
|
||||
labels:
|
||||
- tech-debt
|
||||
- runtime
|
||||
@@ -34,11 +35,11 @@ priority: high
|
||||
## Acceptance Criteria
|
||||
|
||||
<!-- AC:BEGIN -->
|
||||
- [ ] #1 `runYoutubePlaybackFlow`, `maybeSignalPluginAutoplayReady`, `refreshSubtitlePrefetchFromActiveTrack`, `publishDiscordPresence`, and `handleModalInputStateChange` no longer live as substantial inline logic in `src/main.ts`.
|
||||
- [ ] #2 The large subtitle/prefetch, stats startup, and overlay visibility dependency groupings are wrapped behind named composer helpers instead of remaining inline in `src/main.ts`.
|
||||
- [ ] #3 `src/main.ts` reads primarily as a boot and lifecycle coordinator, with domain behavior concentrated in named runtime modules.
|
||||
- [ ] #4 Focused tests cover the extracted behavior or the new composer surfaces.
|
||||
- [ ] #5 The task records whether the remaining size still justifies a boot-phase split or whether that follow-up can wait.
|
||||
- [x] #1 `runYoutubePlaybackFlow`, `maybeSignalPluginAutoplayReady`, `refreshSubtitlePrefetchFromActiveTrack`, `publishDiscordPresence`, and `handleModalInputStateChange` no longer live as substantial inline logic in `src/main.ts`.
|
||||
- [x] #2 The large subtitle/prefetch, stats startup, and overlay visibility dependency groupings are wrapped behind named composer helpers instead of remaining inline in `src/main.ts`.
|
||||
- [x] #3 `src/main.ts` reads primarily as a boot and lifecycle coordinator, with domain behavior concentrated in named runtime modules.
|
||||
- [x] #4 Focused tests cover the extracted behavior or the new composer surfaces.
|
||||
- [x] #5 The task records whether the remaining size still justifies a boot-phase split or whether that follow-up can wait.
|
||||
<!-- AC:END -->
|
||||
|
||||
## Implementation Plan
|
||||
@@ -58,3 +59,24 @@ Guardrails:
|
||||
- Prefer moving logic to existing runtime surfaces over creating new giant helper files.
|
||||
- Do not expand into unrelated `src/main.ts` cleanup that is already tracked by other TASK-238 slices.
|
||||
<!-- SECTION:PLAN:END -->
|
||||
|
||||
## Implementation Notes
|
||||
|
||||
<!-- SECTION:NOTES:BEGIN -->
|
||||
Extracted the remaining inline runtime seams from `src/main.ts` into focused runtime modules:
|
||||
`src/main/runtime/youtube-playback-runtime.ts`,
|
||||
`src/main/runtime/autoplay-ready-gate.ts`,
|
||||
`src/main/runtime/subtitle-prefetch-runtime.ts`,
|
||||
`src/main/runtime/discord-presence-runtime.ts`,
|
||||
and `src/main/runtime/overlay-modal-input-state.ts`.
|
||||
|
||||
Added named composer wrappers for the grouped subtitle/prefetch, stats startup, and overlay visibility wiring in `src/main/runtime/composers/`.
|
||||
|
||||
Re-scan result for the boot-phase split follow-up: the entrypoint is materially closer to a boot/lifecycle coordinator now, so TASK-238.7 remains a valid future cleanup but no longer feels urgent or blocking for maintainability.
|
||||
<!-- SECTION:NOTES:END -->
|
||||
|
||||
## Final Summary
|
||||
|
||||
<!-- SECTION:FINAL_SUMMARY:BEGIN -->
|
||||
TASK-238.6 is complete. Verification passed with `bun run typecheck`, focused runtime/composer tests, `bun run test:fast`, `bun run test:env`, and `bun run build`. The remaining `src/main.ts` work is now better isolated behind runtime modules and composer helpers, and the boot-phase split can wait for a later cleanup pass instead of being treated as immediate follow-on work.
|
||||
<!-- SECTION:FINAL_SUMMARY:END -->
|
||||
|
||||
615
src/main.ts
615
src/main.ts
@@ -319,11 +319,12 @@ import {
|
||||
shouldAutoOpenFirstRunSetup,
|
||||
} from './main/runtime/first-run-setup-service';
|
||||
import { createYoutubeFlowRuntime } from './main/runtime/youtube-flow';
|
||||
import { createYoutubePlaybackRuntime } from './main/runtime/youtube-playback-runtime';
|
||||
import {
|
||||
clearYoutubePrimarySubtitleNotificationTimer,
|
||||
createYoutubePrimarySubtitleNotificationRuntime,
|
||||
} from './main/runtime/youtube-primary-subtitle-notification';
|
||||
import { resolveAutoplayReadyMaxReleaseAttempts } from './main/runtime/startup-autoplay-release-policy';
|
||||
import { createAutoplayReadyGate } from './main/runtime/autoplay-ready-gate';
|
||||
import {
|
||||
buildFirstRunSetupHtml,
|
||||
createMaybeFocusExistingFirstRunSetupWindowHandler,
|
||||
@@ -384,7 +385,10 @@ import {
|
||||
composeJellyfinRuntimeHandlers,
|
||||
composeMpvRuntimeHandlers,
|
||||
composeOverlayWindowHandlers,
|
||||
composeOverlayVisibilityRuntime,
|
||||
composeShortcutRuntimes,
|
||||
composeStatsStartupRuntime,
|
||||
composeSubtitlePrefetchRuntime,
|
||||
composeStartupLifecycleHandlers,
|
||||
} from './main/runtime/composers';
|
||||
import { createStartupBootstrapRuntimeDeps } from './main/startup';
|
||||
@@ -399,6 +403,7 @@ import { registerIpcRuntimeServices } from './main/ipc-runtime';
|
||||
import { createAnkiJimakuIpcRuntimeServiceDeps } from './main/dependencies';
|
||||
import { handleCliCommandRuntimeServiceWithContext } from './main/cli-runtime';
|
||||
import { createOverlayModalRuntimeService } from './main/overlay-runtime';
|
||||
import { createOverlayModalInputState } from './main/runtime/overlay-modal-input-state';
|
||||
import { openYoutubeTrackPicker } from './main/runtime/youtube-picker-open';
|
||||
import type { OverlayHostedModal } from './shared/ipc/contracts';
|
||||
import { createOverlayShortcutsRuntimeService } from './main/overlay-shortcuts-runtime';
|
||||
@@ -412,12 +417,17 @@ import {
|
||||
} from './main/jlpt-runtime';
|
||||
import { createMediaRuntimeService } from './main/media-runtime';
|
||||
import { createOverlayVisibilityRuntimeService } from './main/overlay-visibility-runtime';
|
||||
import { createDiscordPresenceRuntime } from './main/runtime/discord-presence-runtime';
|
||||
import { createCharacterDictionaryRuntimeService } from './main/character-dictionary-runtime';
|
||||
import { createCharacterDictionaryAutoSyncRuntimeService } from './main/runtime/character-dictionary-auto-sync';
|
||||
import { handleCharacterDictionaryAutoSyncComplete } from './main/runtime/character-dictionary-auto-sync-completion';
|
||||
import { notifyCharacterDictionaryAutoSyncStatus } from './main/runtime/character-dictionary-auto-sync-notifications';
|
||||
import { createCurrentMediaTokenizationGate } from './main/runtime/current-media-tokenization-gate';
|
||||
import { createStartupOsdSequencer } from './main/runtime/startup-osd-sequencer';
|
||||
import {
|
||||
createRefreshSubtitlePrefetchFromActiveTrackHandler,
|
||||
createResolveActiveSubtitleSidebarSourceHandler,
|
||||
} from './main/runtime/subtitle-prefetch-runtime';
|
||||
import {
|
||||
createCreateAnilistSetupWindowHandler,
|
||||
createCreateFirstRunSetupWindowHandler,
|
||||
@@ -502,9 +512,6 @@ let anilistUpdateInFlightState = createInitialAnilistUpdateInFlightState();
|
||||
const anilistAttemptedUpdateKeys = new Set<string>();
|
||||
let anilistCachedAccessToken: string | null = null;
|
||||
let jellyfinPlayQuitOnDisconnectArmed = false;
|
||||
let youtubePlayQuitOnDisconnectArmed = false;
|
||||
let youtubePlayQuitOnDisconnectArmTimer: ReturnType<typeof setTimeout> | null = null;
|
||||
let youtubePlaybackFlowGeneration = 0;
|
||||
const JELLYFIN_LANG_PREF = 'ja,jp,jpn,japanese,en,eng,english,enUS,en-US';
|
||||
const JELLYFIN_TICKS_PER_SECOND = 10_000_000;
|
||||
const JELLYFIN_REMOTE_PROGRESS_INTERVAL_MS = 3000;
|
||||
@@ -771,27 +778,17 @@ process.on('SIGTERM', () => {
|
||||
});
|
||||
|
||||
const overlayManager = createOverlayManager();
|
||||
let overlayModalInputExclusive = false;
|
||||
let syncOverlayShortcutsForModal: (isActive: boolean) => void = () => {};
|
||||
let syncOverlayVisibilityForModal: () => void = () => {};
|
||||
|
||||
const handleModalInputStateChange = (isActive: boolean): void => {
|
||||
if (overlayModalInputExclusive === isActive) return;
|
||||
overlayModalInputExclusive = isActive;
|
||||
if (isActive) {
|
||||
const modalWindow = overlayManager.getModalWindow();
|
||||
if (modalWindow && !modalWindow.isDestroyed()) {
|
||||
modalWindow.setIgnoreMouseEvents(false);
|
||||
modalWindow.setAlwaysOnTop(true, 'screen-saver', 1);
|
||||
modalWindow.focus();
|
||||
if (!modalWindow.webContents.isFocused()) {
|
||||
modalWindow.webContents.focus();
|
||||
}
|
||||
}
|
||||
}
|
||||
const overlayModalInputState = createOverlayModalInputState({
|
||||
getModalWindow: () => overlayManager.getModalWindow(),
|
||||
syncOverlayShortcutsForModal: (isActive) => {
|
||||
syncOverlayShortcutsForModal(isActive);
|
||||
},
|
||||
syncOverlayVisibilityForModal: () => {
|
||||
syncOverlayVisibilityForModal();
|
||||
};
|
||||
},
|
||||
});
|
||||
|
||||
const buildOverlayContentMeasurementStoreMainDepsHandler =
|
||||
createBuildOverlayContentMeasurementStoreMainDepsHandler({
|
||||
@@ -812,7 +809,8 @@ const overlayContentMeasurementStore = createOverlayContentMeasurementStore(
|
||||
const overlayModalRuntime = createOverlayModalRuntimeService(
|
||||
buildOverlayModalRuntimeMainDepsHandler(),
|
||||
{
|
||||
onModalStateChange: (isActive: boolean) => handleModalInputStateChange(isActive),
|
||||
onModalStateChange: (isActive: boolean) =>
|
||||
overlayModalInputState.handleModalInputStateChange(isActive),
|
||||
},
|
||||
);
|
||||
const appState = createAppState({
|
||||
@@ -856,7 +854,7 @@ const youtubeFlowRuntime = createYoutubeFlowRuntime({
|
||||
subtitleProcessingController.refreshCurrentSubtitle(text);
|
||||
},
|
||||
refreshSubtitleSidebarSource: async (sourcePath: string) => {
|
||||
await refreshSubtitleSidebarFromSource(sourcePath);
|
||||
await subtitlePrefetchRuntime.refreshSubtitleSidebarFromSource(sourcePath);
|
||||
},
|
||||
startTokenizationWarmups: async () => {
|
||||
await startTokenizationWarmups();
|
||||
@@ -969,114 +967,51 @@ const waitForYoutubeMpvConnected = createWaitForMpvConnectedHandler({
|
||||
now: () => Date.now(),
|
||||
sleep: (delayMs) => new Promise((resolve) => setTimeout(resolve, delayMs)),
|
||||
});
|
||||
|
||||
function clearYoutubePlayQuitOnDisconnectArmTimer(): void {
|
||||
if (youtubePlayQuitOnDisconnectArmTimer) {
|
||||
clearTimeout(youtubePlayQuitOnDisconnectArmTimer);
|
||||
youtubePlayQuitOnDisconnectArmTimer = null;
|
||||
}
|
||||
}
|
||||
|
||||
function invalidatePendingAutoplayReadyFallbacks(): void {
|
||||
autoPlayReadySignalMediaPath = null;
|
||||
autoPlayReadySignalGeneration += 1;
|
||||
}
|
||||
|
||||
async function runYoutubePlaybackFlowMain(request: {
|
||||
url: string;
|
||||
mode: NonNullable<CliArgs['youtubeMode']>;
|
||||
source: CliCommandSource;
|
||||
}): Promise<void> {
|
||||
const flowGeneration = ++youtubePlaybackFlowGeneration;
|
||||
invalidatePendingAutoplayReadyFallbacks();
|
||||
youtubePrimarySubtitleNotificationRuntime.setAppOwnedFlowInFlight(true);
|
||||
let flowCompleted = false;
|
||||
try {
|
||||
clearYoutubePlayQuitOnDisconnectArmTimer();
|
||||
youtubePlayQuitOnDisconnectArmed = false;
|
||||
const autoplayReadyGate = createAutoplayReadyGate({
|
||||
isAppOwnedFlowInFlight: () => youtubePrimarySubtitleNotificationRuntime.isAppOwnedFlowInFlight(),
|
||||
getCurrentMediaPath: () => appState.currentMediaPath,
|
||||
getCurrentVideoPath: () => appState.mpvClient?.currentVideoPath ?? null,
|
||||
getPlaybackPaused: () => appState.playbackPaused,
|
||||
getMpvClient: () => appState.mpvClient,
|
||||
signalPluginAutoplayReady: () => {
|
||||
sendMpvCommandRuntime(appState.mpvClient, ['script-message', 'subminer-autoplay-ready']);
|
||||
},
|
||||
schedule: (callback, delayMs) => setTimeout(callback, delayMs),
|
||||
logDebug: (message) => logger.debug(message),
|
||||
});
|
||||
const youtubePlaybackRuntime = createYoutubePlaybackRuntime({
|
||||
platform: process.platform,
|
||||
directPlaybackFormat: YOUTUBE_DIRECT_PLAYBACK_FORMAT,
|
||||
mpvYtdlFormat: YOUTUBE_MPV_YTDL_FORMAT,
|
||||
autoLaunchTimeoutMs: YOUTUBE_MPV_AUTO_LAUNCH_TIMEOUT_MS,
|
||||
connectTimeoutMs: YOUTUBE_MPV_CONNECT_TIMEOUT_MS,
|
||||
socketPath: appState.mpvSocketPath,
|
||||
getMpvConnected: () => Boolean(appState.mpvClient?.connected),
|
||||
invalidatePendingAutoplayReadyFallbacks: () =>
|
||||
autoplayReadyGate.invalidatePendingAutoplayReadyFallbacks(),
|
||||
setAppOwnedFlowInFlight: (next) => {
|
||||
youtubePrimarySubtitleNotificationRuntime.setAppOwnedFlowInFlight(next);
|
||||
},
|
||||
ensureYoutubePlaybackRuntimeReady: async () => {
|
||||
await ensureYoutubePlaybackRuntimeReady();
|
||||
let playbackUrl = request.url;
|
||||
let launchedWindowsMpv = false;
|
||||
if (process.platform === 'win32') {
|
||||
try {
|
||||
playbackUrl = await resolveYoutubePlaybackUrl(request.url, YOUTUBE_DIRECT_PLAYBACK_FORMAT);
|
||||
logger.info('Resolved direct YouTube playback URL for Windows MPV startup.');
|
||||
} catch (error) {
|
||||
logger.warn(
|
||||
`Failed to resolve direct YouTube playback URL; falling back to page URL: ${
|
||||
error instanceof Error ? error.message : String(error)
|
||||
}`,
|
||||
);
|
||||
}
|
||||
}
|
||||
if (process.platform === 'win32' && !appState.mpvClient?.connected) {
|
||||
const launchResult = launchWindowsMpv(
|
||||
},
|
||||
resolveYoutubePlaybackUrl: (url, format) => resolveYoutubePlaybackUrl(url, format),
|
||||
launchWindowsMpv: (playbackUrl, args) =>
|
||||
launchWindowsMpv(
|
||||
[playbackUrl],
|
||||
createWindowsMpvLaunchDeps({
|
||||
showError: (title, content) => dialog.showErrorBox(title, content),
|
||||
}),
|
||||
[
|
||||
'--pause=yes',
|
||||
'--ytdl=yes',
|
||||
`--ytdl-format=${YOUTUBE_MPV_YTDL_FORMAT}`,
|
||||
'--sub-auto=no',
|
||||
'--sub-file-paths=.;subs;subtitles',
|
||||
'--sid=auto',
|
||||
'--secondary-sid=auto',
|
||||
'--secondary-sub-visibility=no',
|
||||
'--alang=ja,jp,jpn,japanese,en,eng,english,enus,en-us',
|
||||
'--slang=ja,jp,jpn,japanese,en,eng,english,enus,en-us',
|
||||
`--log-file=${DEFAULT_MPV_LOG_PATH}`,
|
||||
`--input-ipc-server=${appState.mpvSocketPath}`,
|
||||
],
|
||||
);
|
||||
launchedWindowsMpv = launchResult.ok;
|
||||
if (launchResult.ok) {
|
||||
logger.info(`Bootstrapping Windows mpv for YouTube playback via ${launchResult.mpvPath}`);
|
||||
}
|
||||
if (!launchResult.ok) {
|
||||
logger.warn('Unable to bootstrap Windows mpv for YouTube playback.');
|
||||
}
|
||||
}
|
||||
const connected = await waitForYoutubeMpvConnected(
|
||||
launchedWindowsMpv ? YOUTUBE_MPV_AUTO_LAUNCH_TIMEOUT_MS : YOUTUBE_MPV_CONNECT_TIMEOUT_MS,
|
||||
);
|
||||
if (!connected) {
|
||||
throw new Error(
|
||||
launchedWindowsMpv
|
||||
? 'MPV not connected after auto-launch. Ensure mpv is installed and can open the requested YouTube URL.'
|
||||
: 'MPV not connected. Start mpv with the SubMiner profile or retry after mpv finishes starting.',
|
||||
);
|
||||
}
|
||||
if (request.source === 'initial') {
|
||||
youtubePlayQuitOnDisconnectArmTimer = setTimeout(() => {
|
||||
if (youtubePlaybackFlowGeneration !== flowGeneration) {
|
||||
return;
|
||||
}
|
||||
youtubePlayQuitOnDisconnectArmed = true;
|
||||
youtubePlayQuitOnDisconnectArmTimer = null;
|
||||
}, 3000);
|
||||
}
|
||||
const mediaReady = await prepareYoutubePlaybackInMpv({ url: playbackUrl });
|
||||
if (!mediaReady) {
|
||||
throw new Error('Timed out waiting for mpv to load the requested YouTube URL.');
|
||||
}
|
||||
await youtubeFlowRuntime.runYoutubePlaybackFlow({
|
||||
url: request.url,
|
||||
mode: request.mode,
|
||||
});
|
||||
flowCompleted = true;
|
||||
logger.info(`YouTube playback flow completed from ${request.source}.`);
|
||||
} finally {
|
||||
if (youtubePlaybackFlowGeneration === flowGeneration) {
|
||||
if (!flowCompleted) {
|
||||
clearYoutubePlayQuitOnDisconnectArmTimer();
|
||||
youtubePlayQuitOnDisconnectArmed = false;
|
||||
}
|
||||
youtubePrimarySubtitleNotificationRuntime.setAppOwnedFlowInFlight(false);
|
||||
}
|
||||
}
|
||||
}
|
||||
[...args, `--log-file=${DEFAULT_MPV_LOG_PATH}`],
|
||||
),
|
||||
waitForYoutubeMpvConnected: (timeoutMs) => waitForYoutubeMpvConnected(timeoutMs),
|
||||
prepareYoutubePlaybackInMpv: (request) => prepareYoutubePlaybackInMpv(request),
|
||||
runYoutubePlaybackFlow: (request) => youtubeFlowRuntime.runYoutubePlaybackFlow(request),
|
||||
logInfo: (message) => logger.info(message),
|
||||
logWarn: (message) => logger.warn(message),
|
||||
schedule: (callback, delayMs) => setTimeout(callback, delayMs),
|
||||
clearScheduled: (timer) => clearTimeout(timer),
|
||||
});
|
||||
|
||||
let firstRunSetupMessage: string | null = null;
|
||||
const resolveWindowsMpvShortcutRuntimePaths = () =>
|
||||
@@ -1148,40 +1083,21 @@ const firstRunSetupService = createFirstRunSetupService({
|
||||
});
|
||||
const discordPresenceSessionStartedAtMs = Date.now();
|
||||
let discordPresenceMediaDurationSec: number | null = null;
|
||||
|
||||
function refreshDiscordPresenceMediaDuration(): void {
|
||||
const client = appState.mpvClient;
|
||||
if (!client || !client.connected) return;
|
||||
void client
|
||||
.requestProperty('duration')
|
||||
.then((value) => {
|
||||
const numeric = Number(value);
|
||||
discordPresenceMediaDurationSec = Number.isFinite(numeric) && numeric > 0 ? numeric : null;
|
||||
})
|
||||
.catch(() => {
|
||||
discordPresenceMediaDurationSec = null;
|
||||
});
|
||||
}
|
||||
|
||||
function publishDiscordPresence(): void {
|
||||
const discordPresenceService = appState.discordPresenceService;
|
||||
if (!discordPresenceService || getResolvedConfig().discordPresence.enabled !== true) {
|
||||
return;
|
||||
}
|
||||
|
||||
refreshDiscordPresenceMediaDuration();
|
||||
discordPresenceService.publish({
|
||||
mediaTitle: appState.currentMediaTitle,
|
||||
mediaPath: appState.currentMediaPath,
|
||||
subtitleText: appState.currentSubText,
|
||||
currentTimeSec: appState.mpvClient?.currentTimePos ?? null,
|
||||
mediaDurationSec:
|
||||
discordPresenceMediaDurationSec ?? anilistMediaGuessRuntimeState.mediaDurationSec,
|
||||
paused: appState.playbackPaused,
|
||||
connected: Boolean(appState.mpvClient?.connected),
|
||||
sessionStartedAtMs: discordPresenceSessionStartedAtMs,
|
||||
});
|
||||
}
|
||||
const discordPresenceRuntime = createDiscordPresenceRuntime({
|
||||
getDiscordPresenceService: () => appState.discordPresenceService,
|
||||
isDiscordPresenceEnabled: () => getResolvedConfig().discordPresence.enabled === true,
|
||||
getMpvClient: () => appState.mpvClient,
|
||||
getCurrentMediaTitle: () => appState.currentMediaTitle,
|
||||
getCurrentMediaPath: () => appState.currentMediaPath,
|
||||
getCurrentSubtitleText: () => appState.currentSubText,
|
||||
getPlaybackPaused: () => appState.playbackPaused,
|
||||
getFallbackMediaDurationSec: () => anilistMediaGuessRuntimeState.mediaDurationSec,
|
||||
getSessionStartedAtMs: () => discordPresenceSessionStartedAtMs,
|
||||
getMediaDurationSec: () => discordPresenceMediaDurationSec,
|
||||
setMediaDurationSec: (next) => {
|
||||
discordPresenceMediaDurationSec = next;
|
||||
},
|
||||
});
|
||||
|
||||
function createDiscordRpcClient() {
|
||||
const discordRpc = require('discord-rpc') as {
|
||||
@@ -1215,7 +1131,7 @@ async function initializeDiscordPresenceService(): Promise<void> {
|
||||
logDebug: (message, meta) => logger.debug(message, meta),
|
||||
});
|
||||
await appState.discordPresenceService.start();
|
||||
publishDiscordPresence();
|
||||
discordPresenceRuntime.publishDiscordPresence();
|
||||
}
|
||||
const ensureOverlayMpvSubtitlesHidden = createEnsureOverlayMpvSubtitlesHiddenHandler({
|
||||
getMpvClient: () => appState.mpvClient,
|
||||
@@ -1325,8 +1241,6 @@ const statsCoverArtFetcher = createCoverArtFetcher(
|
||||
const anilistStateRuntime = createAnilistStateRuntime(buildAnilistStateRuntimeMainDepsHandler());
|
||||
const configDerivedRuntime = createConfigDerivedRuntime(buildConfigDerivedRuntimeMainDepsHandler());
|
||||
const subsyncRuntime = createMainSubsyncRuntime(buildMainSubsyncRuntimeMainDepsHandler());
|
||||
let autoPlayReadySignalMediaPath: string | null = null;
|
||||
let autoPlayReadySignalGeneration = 0;
|
||||
const currentMediaTokenizationGate = createCurrentMediaTokenizationGate();
|
||||
const startupOsdSequencer = createStartupOsdSequencer({
|
||||
showOsd: (message) => showMpvOsd(message),
|
||||
@@ -1375,110 +1289,6 @@ async function openYoutubeTrackPickerFromPlayback(): Promise<void> {
|
||||
});
|
||||
}
|
||||
|
||||
function maybeSignalPluginAutoplayReady(
|
||||
payload: SubtitleData,
|
||||
options?: { forceWhilePaused?: boolean },
|
||||
): void {
|
||||
if (youtubePrimarySubtitleNotificationRuntime.isAppOwnedFlowInFlight()) {
|
||||
logger.debug('[autoplay-ready] suppressed while app-owned YouTube flow is active');
|
||||
return;
|
||||
}
|
||||
if (!payload.text.trim()) {
|
||||
return;
|
||||
}
|
||||
const mediaPath =
|
||||
appState.currentMediaPath?.trim() ||
|
||||
appState.mpvClient?.currentVideoPath?.trim() ||
|
||||
'__unknown__';
|
||||
const duplicateMediaSignal = autoPlayReadySignalMediaPath === mediaPath;
|
||||
const allowDuplicateWhilePaused =
|
||||
options?.forceWhilePaused === true && appState.playbackPaused !== false;
|
||||
if (duplicateMediaSignal && !allowDuplicateWhilePaused) {
|
||||
return;
|
||||
}
|
||||
const signalPluginAutoplayReady = (): void => {
|
||||
logger.debug(`[autoplay-ready] signaling mpv for media: ${mediaPath}`);
|
||||
sendMpvCommandRuntime(appState.mpvClient, ['script-message', 'subminer-autoplay-ready']);
|
||||
};
|
||||
if (duplicateMediaSignal && allowDuplicateWhilePaused) {
|
||||
// Keep re-notifying the plugin while paused (for startup visibility sync), but
|
||||
// do not run local unpause fallback on duplicates to avoid resuming user-paused playback.
|
||||
signalPluginAutoplayReady();
|
||||
return;
|
||||
}
|
||||
autoPlayReadySignalMediaPath = mediaPath;
|
||||
const playbackGeneration = ++autoPlayReadySignalGeneration;
|
||||
signalPluginAutoplayReady();
|
||||
const isPlaybackPaused = async (client: {
|
||||
requestProperty: (property: string) => Promise<unknown>;
|
||||
}): Promise<boolean> => {
|
||||
try {
|
||||
const pauseProperty = await client.requestProperty('pause');
|
||||
if (typeof pauseProperty === 'boolean') {
|
||||
return pauseProperty;
|
||||
}
|
||||
if (typeof pauseProperty === 'string') {
|
||||
return pauseProperty.toLowerCase() !== 'no' && pauseProperty !== '0';
|
||||
}
|
||||
if (typeof pauseProperty === 'number') {
|
||||
return pauseProperty !== 0;
|
||||
}
|
||||
logger.debug(
|
||||
`[autoplay-ready] unrecognized pause property for media ${mediaPath}: ${String(pauseProperty)}`,
|
||||
);
|
||||
} catch (error) {
|
||||
logger.debug(
|
||||
`[autoplay-ready] failed to read pause property for media ${mediaPath}: ${(error as Error).message}`,
|
||||
);
|
||||
}
|
||||
return true;
|
||||
};
|
||||
|
||||
// Fallback: repeatedly try to release pause for a short window in case startup
|
||||
// gate arming and tokenization-ready signal arrive out of order.
|
||||
const releaseRetryDelayMs = 200;
|
||||
const maxReleaseAttempts = resolveAutoplayReadyMaxReleaseAttempts({
|
||||
forceWhilePaused: options?.forceWhilePaused === true,
|
||||
retryDelayMs: releaseRetryDelayMs,
|
||||
});
|
||||
const attemptRelease = (attempt: number): void => {
|
||||
void (async () => {
|
||||
if (
|
||||
autoPlayReadySignalMediaPath !== mediaPath ||
|
||||
playbackGeneration !== autoPlayReadySignalGeneration
|
||||
) {
|
||||
return;
|
||||
}
|
||||
|
||||
const mpvClient = appState.mpvClient;
|
||||
if (!mpvClient?.connected) {
|
||||
if (attempt < maxReleaseAttempts) {
|
||||
setTimeout(() => attemptRelease(attempt + 1), releaseRetryDelayMs);
|
||||
}
|
||||
return;
|
||||
}
|
||||
|
||||
const shouldUnpause = await isPlaybackPaused(mpvClient);
|
||||
logger.debug(
|
||||
`[autoplay-ready] mpv paused before fallback attempt ${attempt} for ${mediaPath}: ${shouldUnpause}`,
|
||||
);
|
||||
if (!shouldUnpause) {
|
||||
if (attempt === 0) {
|
||||
logger.debug('[autoplay-ready] mpv already playing; no fallback unpause needed');
|
||||
}
|
||||
return;
|
||||
}
|
||||
|
||||
signalPluginAutoplayReady();
|
||||
mpvClient.send({ command: ['set_property', 'pause', false] });
|
||||
if (attempt < maxReleaseAttempts) {
|
||||
setTimeout(() => attemptRelease(attempt + 1), releaseRetryDelayMs);
|
||||
}
|
||||
})();
|
||||
};
|
||||
attemptRelease(0);
|
||||
}
|
||||
|
||||
let appTray: Tray | null = null;
|
||||
let tokenizeSubtitleDeferred: ((text: string) => Promise<SubtitleData>) | null = null;
|
||||
function withCurrentSubtitleTiming(payload: SubtitleData): SubtitleData {
|
||||
@@ -1552,6 +1362,11 @@ const subtitlePrefetchInitController = createSubtitlePrefetchInitController({
|
||||
appState.activeParsedSubtitleSource = sourceKey;
|
||||
},
|
||||
});
|
||||
const resolveActiveSubtitleSidebarSourceHandler = createResolveActiveSubtitleSidebarSourceHandler({
|
||||
getFfmpegPath: () => getResolvedConfig().subsync.ffmpeg_path.trim() || 'ffmpeg',
|
||||
extractInternalSubtitleTrack: (ffmpegPath, videoPath, track) =>
|
||||
extractInternalSubtitleTrackToTempFile(ffmpegPath, videoPath, track),
|
||||
});
|
||||
|
||||
async function refreshSubtitleSidebarFromSource(sourcePath: string): Promise<void> {
|
||||
const normalizedSourcePath = resolveSubtitleSourcePath(sourcePath.trim());
|
||||
@@ -1564,60 +1379,29 @@ async function refreshSubtitleSidebarFromSource(sourcePath: string): Promise<voi
|
||||
normalizedSourcePath,
|
||||
);
|
||||
}
|
||||
|
||||
async function refreshSubtitlePrefetchFromActiveTrack(): Promise<void> {
|
||||
const client = appState.mpvClient;
|
||||
if (!client?.connected) {
|
||||
return;
|
||||
}
|
||||
|
||||
try {
|
||||
const [currentExternalFilenameRaw, currentTrackRaw, trackListRaw, sidRaw, videoPathRaw] =
|
||||
await Promise.all([
|
||||
client.requestProperty('current-tracks/sub/external-filename').catch(() => null),
|
||||
client.requestProperty('current-tracks/sub').catch(() => null),
|
||||
client.requestProperty('track-list'),
|
||||
client.requestProperty('sid'),
|
||||
client.requestProperty('path'),
|
||||
]);
|
||||
const videoPath = typeof videoPathRaw === 'string' ? videoPathRaw : '';
|
||||
if (!videoPath) {
|
||||
subtitlePrefetchInitController.cancelPendingInit();
|
||||
return;
|
||||
}
|
||||
|
||||
const resolvedSource = await resolveActiveSubtitleSidebarSource(
|
||||
currentExternalFilenameRaw,
|
||||
currentTrackRaw,
|
||||
trackListRaw,
|
||||
sidRaw,
|
||||
videoPath,
|
||||
);
|
||||
if (!resolvedSource) {
|
||||
subtitlePrefetchInitController.cancelPendingInit();
|
||||
return;
|
||||
}
|
||||
try {
|
||||
await subtitlePrefetchInitController.initSubtitlePrefetch(
|
||||
resolvedSource.path,
|
||||
lastObservedTimePos,
|
||||
resolvedSource.sourceKey,
|
||||
);
|
||||
} finally {
|
||||
await resolvedSource.cleanup?.();
|
||||
}
|
||||
} catch {
|
||||
// Track list query failed; skip subtitle prefetch refresh.
|
||||
}
|
||||
}
|
||||
const refreshSubtitlePrefetchFromActiveTrackHandler =
|
||||
createRefreshSubtitlePrefetchFromActiveTrackHandler({
|
||||
getMpvClient: () => appState.mpvClient,
|
||||
getLastObservedTimePos: () => lastObservedTimePos,
|
||||
subtitlePrefetchInitController,
|
||||
resolveActiveSubtitleSidebarSource: (input) =>
|
||||
resolveActiveSubtitleSidebarSourceHandler(input),
|
||||
});
|
||||
|
||||
function scheduleSubtitlePrefetchRefresh(delayMs = 0): void {
|
||||
clearScheduledSubtitlePrefetchRefresh();
|
||||
subtitlePrefetchRefreshTimer = setTimeout(() => {
|
||||
subtitlePrefetchRefreshTimer = null;
|
||||
void refreshSubtitlePrefetchFromActiveTrack();
|
||||
void refreshSubtitlePrefetchFromActiveTrackHandler();
|
||||
}, delayMs);
|
||||
}
|
||||
const subtitlePrefetchRuntime = composeSubtitlePrefetchRuntime({
|
||||
subtitlePrefetchInitController,
|
||||
refreshSubtitleSidebarFromSource: (sourcePath) => refreshSubtitleSidebarFromSource(sourcePath),
|
||||
refreshSubtitlePrefetchFromActiveTrack: () => refreshSubtitlePrefetchFromActiveTrackHandler(),
|
||||
scheduleSubtitlePrefetchRefresh: (delayMs) => scheduleSubtitlePrefetchRefresh(delayMs),
|
||||
clearScheduledSubtitlePrefetchRefresh: () => clearScheduledSubtitlePrefetchRefresh(),
|
||||
});
|
||||
|
||||
const overlayShortcutsRuntime = createOverlayShortcutsRuntimeService(
|
||||
createBuildOverlayShortcutsRuntimeMainDepsHandler({
|
||||
@@ -2009,7 +1793,7 @@ const characterDictionaryAutoSyncRuntime = createCharacterDictionaryAutoSyncRunt
|
||||
const overlayVisibilityRuntime = createOverlayVisibilityRuntimeService(
|
||||
createBuildOverlayVisibilityRuntimeMainDepsHandler({
|
||||
getMainWindow: () => overlayManager.getMainWindow(),
|
||||
getModalActive: () => overlayModalInputExclusive,
|
||||
getModalActive: () => overlayModalInputState.getModalInputExclusive(),
|
||||
getVisibleOverlayVisible: () => overlayManager.getVisibleOverlayVisible(),
|
||||
getForceMousePassthrough: () => appState.statsOverlayVisible,
|
||||
getWindowTracker: () => appState.windowTracker,
|
||||
@@ -2048,12 +1832,10 @@ const overlayVisibilityRuntime = createOverlayVisibilityRuntimeService(
|
||||
},
|
||||
})(),
|
||||
);
|
||||
|
||||
const buildGetRuntimeOptionsStateMainDepsHandler = createBuildGetRuntimeOptionsStateMainDepsHandler(
|
||||
{
|
||||
const buildGetRuntimeOptionsStateMainDepsHandler =
|
||||
createBuildGetRuntimeOptionsStateMainDepsHandler({
|
||||
getRuntimeOptionsManager: () => appState.runtimeOptionsManager,
|
||||
},
|
||||
);
|
||||
});
|
||||
const getRuntimeOptionsStateMainDeps = buildGetRuntimeOptionsStateMainDepsHandler();
|
||||
const getRuntimeOptionsStateHandler = createGetRuntimeOptionsStateHandler(
|
||||
getRuntimeOptionsStateMainDeps,
|
||||
@@ -2074,14 +1856,6 @@ const buildRestorePreviousSecondarySubVisibilityMainDepsHandler =
|
||||
syncOverlayVisibilityForModal = () => {
|
||||
overlayVisibilityRuntime.updateVisibleOverlayVisibility();
|
||||
};
|
||||
const restorePreviousSecondarySubVisibilityMainDeps =
|
||||
buildRestorePreviousSecondarySubVisibilityMainDepsHandler();
|
||||
const restorePreviousSecondarySubVisibilityHandler =
|
||||
createRestorePreviousSecondarySubVisibilityHandler(restorePreviousSecondarySubVisibilityMainDeps);
|
||||
|
||||
function restorePreviousSecondarySubVisibility(): void {
|
||||
restorePreviousSecondarySubVisibilityHandler();
|
||||
}
|
||||
|
||||
function broadcastToOverlayWindows(channel: string, ...args: unknown[]): void {
|
||||
overlayManager.broadcastToOverlayWindows(channel, ...args);
|
||||
@@ -2093,32 +1867,12 @@ const buildBroadcastRuntimeOptionsChangedMainDepsHandler =
|
||||
getRuntimeOptionsState: () => getRuntimeOptionsState(),
|
||||
broadcastToOverlayWindows: (channel, ...args) => broadcastToOverlayWindows(channel, ...args),
|
||||
});
|
||||
const broadcastRuntimeOptionsChangedMainDeps = buildBroadcastRuntimeOptionsChangedMainDepsHandler();
|
||||
const broadcastRuntimeOptionsChangedHandler = createBroadcastRuntimeOptionsChangedHandler(
|
||||
broadcastRuntimeOptionsChangedMainDeps,
|
||||
);
|
||||
|
||||
function broadcastRuntimeOptionsChanged(): void {
|
||||
broadcastRuntimeOptionsChangedHandler();
|
||||
}
|
||||
|
||||
const buildSendToActiveOverlayWindowMainDepsHandler =
|
||||
createBuildSendToActiveOverlayWindowMainDepsHandler({
|
||||
sendToActiveOverlayWindowRuntime: (channel, payload, runtimeOptions) =>
|
||||
overlayModalRuntime.sendToActiveOverlayWindow(channel, payload, runtimeOptions),
|
||||
});
|
||||
const sendToActiveOverlayWindowMainDeps = buildSendToActiveOverlayWindowMainDepsHandler();
|
||||
const sendToActiveOverlayWindowHandler = createSendToActiveOverlayWindowHandler(
|
||||
sendToActiveOverlayWindowMainDeps,
|
||||
);
|
||||
|
||||
function sendToActiveOverlayWindow(
|
||||
channel: string,
|
||||
payload?: unknown,
|
||||
runtimeOptions?: { restoreOnModalClose?: OverlayHostedModal },
|
||||
): boolean {
|
||||
return sendToActiveOverlayWindowHandler(channel, payload, runtimeOptions);
|
||||
}
|
||||
|
||||
const buildSetOverlayDebugVisualizationEnabledMainDepsHandler =
|
||||
createBuildSetOverlayDebugVisualizationEnabledMainDepsHandler({
|
||||
@@ -2128,27 +1882,44 @@ const buildSetOverlayDebugVisualizationEnabledMainDepsHandler =
|
||||
appState.overlayDebugVisualizationEnabled = next;
|
||||
},
|
||||
});
|
||||
const setOverlayDebugVisualizationEnabledMainDeps =
|
||||
buildSetOverlayDebugVisualizationEnabledMainDepsHandler();
|
||||
const setOverlayDebugVisualizationEnabledHandler = createSetOverlayDebugVisualizationEnabledHandler(
|
||||
setOverlayDebugVisualizationEnabledMainDeps,
|
||||
);
|
||||
|
||||
function setOverlayDebugVisualizationEnabled(enabled: boolean): void {
|
||||
setOverlayDebugVisualizationEnabledHandler(enabled);
|
||||
}
|
||||
|
||||
const buildOpenRuntimeOptionsPaletteMainDepsHandler =
|
||||
createBuildOpenRuntimeOptionsPaletteMainDepsHandler({
|
||||
openRuntimeOptionsPaletteRuntime: () => overlayModalRuntime.openRuntimeOptionsPalette(),
|
||||
});
|
||||
const openRuntimeOptionsPaletteMainDeps = buildOpenRuntimeOptionsPaletteMainDepsHandler();
|
||||
const openRuntimeOptionsPaletteHandler = createOpenRuntimeOptionsPaletteHandler(
|
||||
openRuntimeOptionsPaletteMainDeps,
|
||||
);
|
||||
const overlayVisibilityComposer = composeOverlayVisibilityRuntime({
|
||||
overlayVisibilityRuntime,
|
||||
restorePreviousSecondarySubVisibilityMainDeps:
|
||||
buildRestorePreviousSecondarySubVisibilityMainDepsHandler(),
|
||||
broadcastRuntimeOptionsChangedMainDeps: buildBroadcastRuntimeOptionsChangedMainDepsHandler(),
|
||||
sendToActiveOverlayWindowMainDeps: buildSendToActiveOverlayWindowMainDepsHandler(),
|
||||
setOverlayDebugVisualizationEnabledMainDeps:
|
||||
buildSetOverlayDebugVisualizationEnabledMainDepsHandler(),
|
||||
openRuntimeOptionsPaletteMainDeps: buildOpenRuntimeOptionsPaletteMainDepsHandler(),
|
||||
});
|
||||
|
||||
function restorePreviousSecondarySubVisibility(): void {
|
||||
overlayVisibilityComposer.restorePreviousSecondarySubVisibility();
|
||||
}
|
||||
|
||||
function broadcastRuntimeOptionsChanged(): void {
|
||||
overlayVisibilityComposer.broadcastRuntimeOptionsChanged();
|
||||
}
|
||||
|
||||
function sendToActiveOverlayWindow(
|
||||
channel: string,
|
||||
payload?: unknown,
|
||||
runtimeOptions?: { restoreOnModalClose?: OverlayHostedModal },
|
||||
): boolean {
|
||||
return overlayVisibilityComposer.sendToActiveOverlayWindow(channel, payload, runtimeOptions);
|
||||
}
|
||||
|
||||
function setOverlayDebugVisualizationEnabled(enabled: boolean): void {
|
||||
overlayVisibilityComposer.setOverlayDebugVisualizationEnabled(enabled);
|
||||
}
|
||||
|
||||
function openRuntimeOptionsPalette(): void {
|
||||
openRuntimeOptionsPaletteHandler();
|
||||
overlayVisibilityComposer.openRuntimeOptionsPalette();
|
||||
}
|
||||
|
||||
function getResolvedConfig() {
|
||||
@@ -3179,9 +2950,10 @@ const ensureImmersionTrackerStarted = (): void => {
|
||||
hasAttemptedImmersionTrackerStartup = true;
|
||||
createImmersionTrackerStartup();
|
||||
};
|
||||
|
||||
const runStatsCliCommand = createRunStatsCliCommandHandler({
|
||||
getResolvedConfig: () => getResolvedConfig(),
|
||||
const statsStartupRuntime = composeStatsStartupRuntime({
|
||||
ensureStatsServerStarted: () => ensureStatsServerStarted(),
|
||||
ensureBackgroundStatsServerStarted: () => ensureBackgroundStatsServerStarted(),
|
||||
stopBackgroundStatsServer: () => stopBackgroundStatsServer(),
|
||||
ensureImmersionTrackerStarted: () => {
|
||||
appState.statsStartupInProgress = true;
|
||||
try {
|
||||
@@ -3190,13 +2962,18 @@ const runStatsCliCommand = createRunStatsCliCommandHandler({
|
||||
appState.statsStartupInProgress = false;
|
||||
}
|
||||
},
|
||||
});
|
||||
|
||||
const runStatsCliCommand = createRunStatsCliCommandHandler({
|
||||
getResolvedConfig: () => getResolvedConfig(),
|
||||
ensureImmersionTrackerStarted: () => statsStartupRuntime.ensureImmersionTrackerStarted(),
|
||||
ensureVocabularyCleanupTokenizerReady: async () => {
|
||||
await createMecabTokenizerAndCheck();
|
||||
},
|
||||
getImmersionTracker: () => appState.immersionTracker,
|
||||
ensureStatsServerStarted: () => ensureStatsServerStarted(),
|
||||
ensureBackgroundStatsServerStarted: () => ensureBackgroundStatsServerStarted(),
|
||||
stopBackgroundStatsServer: () => stopBackgroundStatsServer(),
|
||||
ensureStatsServerStarted: () => statsStartupRuntime.ensureStatsServerStarted(),
|
||||
ensureBackgroundStatsServerStarted: () => statsStartupRuntime.ensureBackgroundStatsServerStarted(),
|
||||
stopBackgroundStatsServer: () => statsStartupRuntime.stopBackgroundStatsServer(),
|
||||
openExternal: (url: string) => shell.openExternal(url),
|
||||
writeResponse: (responsePath, payload) => {
|
||||
writeStatsCliCommandResponse(responsePath, payload);
|
||||
@@ -3497,7 +3274,7 @@ const {
|
||||
bindMpvMainEventHandlersMainDeps: {
|
||||
appState,
|
||||
getQuitOnDisconnectArmed: () =>
|
||||
jellyfinPlayQuitOnDisconnectArmed || youtubePlayQuitOnDisconnectArmed,
|
||||
jellyfinPlayQuitOnDisconnectArmed || youtubePlaybackRuntime.getQuitOnDisconnectArmed(),
|
||||
scheduleQuitCheck: (callback) => {
|
||||
setTimeout(callback, 500);
|
||||
},
|
||||
@@ -3519,7 +3296,7 @@ const {
|
||||
subtitleProcessingController.onSubtitleChange(text);
|
||||
},
|
||||
refreshDiscordPresence: () => {
|
||||
publishDiscordPresence();
|
||||
discordPresenceRuntime.publishDiscordPresence();
|
||||
},
|
||||
ensureImmersionTrackerInitialized: () => {
|
||||
ensureImmersionTrackerStarted();
|
||||
@@ -3527,16 +3304,16 @@ const {
|
||||
tokenizeSubtitleForImmersion: async (text): Promise<SubtitleData | null> =>
|
||||
tokenizeSubtitleDeferred ? await tokenizeSubtitleDeferred(text) : null,
|
||||
updateCurrentMediaPath: (path) => {
|
||||
autoPlayReadySignalMediaPath = null;
|
||||
autoplayReadyGate.invalidatePendingAutoplayReadyFallbacks();
|
||||
currentMediaTokenizationGate.updateCurrentMediaPath(path);
|
||||
startupOsdSequencer.reset();
|
||||
clearScheduledSubtitlePrefetchRefresh();
|
||||
subtitlePrefetchInitController.cancelPendingInit();
|
||||
subtitlePrefetchRuntime.clearScheduledSubtitlePrefetchRefresh();
|
||||
subtitlePrefetchRuntime.cancelPendingInit();
|
||||
youtubePrimarySubtitleNotificationRuntime.handleMediaPathChange(path);
|
||||
if (path) {
|
||||
ensureImmersionTrackerStarted();
|
||||
// Delay slightly to allow MPV's track-list to be populated.
|
||||
scheduleSubtitlePrefetchRefresh(500);
|
||||
subtitlePrefetchRuntime.scheduleSubtitlePrefetchRefresh(500);
|
||||
}
|
||||
mediaRuntime.updateCurrentMediaPath(path);
|
||||
},
|
||||
@@ -3563,7 +3340,7 @@ const {
|
||||
if (!isTokenizationWarmupReady()) {
|
||||
return;
|
||||
}
|
||||
maybeSignalPluginAutoplayReady(
|
||||
autoplayReadyGate.maybeSignalPluginAutoplayReady(
|
||||
{ text: '__warm__', tokens: null },
|
||||
{ forceWhilePaused: true },
|
||||
);
|
||||
@@ -3684,7 +3461,10 @@ const {
|
||||
appState.currentMediaPath?.trim() || appState.mpvClient?.currentVideoPath?.trim() || null,
|
||||
);
|
||||
startupOsdSequencer.markTokenizationReady();
|
||||
maybeSignalPluginAutoplayReady({ text, tokens: null }, { forceWhilePaused: true });
|
||||
autoplayReadyGate.maybeSignalPluginAutoplayReady(
|
||||
{ text, tokens: null },
|
||||
{ forceWhilePaused: true },
|
||||
);
|
||||
},
|
||||
},
|
||||
createTokenizerRuntimeDeps: (deps) =>
|
||||
@@ -4254,48 +4034,6 @@ function parseTrackId(value: unknown): number | null {
|
||||
return null;
|
||||
}
|
||||
|
||||
function getActiveSubtitleTrack(
|
||||
currentTrackRaw: unknown,
|
||||
trackListRaw: unknown,
|
||||
sidRaw: unknown,
|
||||
): MpvSubtitleTrackLike | null {
|
||||
if (currentTrackRaw && typeof currentTrackRaw === 'object') {
|
||||
const track = currentTrackRaw as MpvSubtitleTrackLike;
|
||||
if (track.type === undefined || track.type === 'sub') {
|
||||
return track;
|
||||
}
|
||||
}
|
||||
|
||||
const sid = parseTrackId(sidRaw);
|
||||
if (!Array.isArray(trackListRaw)) {
|
||||
return null;
|
||||
}
|
||||
|
||||
const bySid =
|
||||
sid === null
|
||||
? null
|
||||
: ((trackListRaw.find((entry: unknown) => {
|
||||
if (!entry || typeof entry !== 'object') {
|
||||
return false;
|
||||
}
|
||||
const track = entry as MpvSubtitleTrackLike;
|
||||
return track.type === 'sub' && parseTrackId(track.id) === sid;
|
||||
}) as MpvSubtitleTrackLike | undefined) ?? null);
|
||||
if (bySid) {
|
||||
return bySid;
|
||||
}
|
||||
|
||||
return (
|
||||
(trackListRaw.find((entry: unknown) => {
|
||||
if (!entry || typeof entry !== 'object') {
|
||||
return false;
|
||||
}
|
||||
const track = entry as MpvSubtitleTrackLike;
|
||||
return track.type === 'sub' && track.selected === true;
|
||||
}) as MpvSubtitleTrackLike | undefined) ?? null
|
||||
);
|
||||
}
|
||||
|
||||
function buildFfmpegSubtitleExtractionArgs(
|
||||
videoPath: string,
|
||||
ffIndex: number,
|
||||
@@ -4368,41 +4106,6 @@ async function extractInternalSubtitleTrackToTempFile(
|
||||
};
|
||||
}
|
||||
|
||||
async function resolveActiveSubtitleSidebarSource(
|
||||
currentExternalFilenameRaw: unknown,
|
||||
currentTrackRaw: unknown,
|
||||
trackListRaw: unknown,
|
||||
sidRaw: unknown,
|
||||
videoPath: string,
|
||||
): Promise<{ path: string; sourceKey: string; cleanup?: () => Promise<void> } | null> {
|
||||
const currentExternalFilename =
|
||||
typeof currentExternalFilenameRaw === 'string' ? currentExternalFilenameRaw.trim() : '';
|
||||
if (currentExternalFilename) {
|
||||
return { path: currentExternalFilename, sourceKey: currentExternalFilename };
|
||||
}
|
||||
|
||||
const track = getActiveSubtitleTrack(currentTrackRaw, trackListRaw, sidRaw);
|
||||
if (!track) {
|
||||
return null;
|
||||
}
|
||||
|
||||
const externalFilename =
|
||||
typeof track['external-filename'] === 'string' ? track['external-filename'].trim() : '';
|
||||
if (externalFilename) {
|
||||
return { path: externalFilename, sourceKey: externalFilename };
|
||||
}
|
||||
|
||||
const ffmpegPath = getResolvedConfig().subsync.ffmpeg_path.trim() || 'ffmpeg';
|
||||
const extracted = await extractInternalSubtitleTrackToTempFile(ffmpegPath, videoPath, track);
|
||||
if (!extracted) {
|
||||
return null;
|
||||
}
|
||||
return {
|
||||
...extracted,
|
||||
sourceKey: buildSubtitleSidebarSourceKey(videoPath, track, extracted.path),
|
||||
};
|
||||
}
|
||||
|
||||
const shiftSubtitleDelayToAdjacentCueHandler = createShiftSubtitleDelayToAdjacentCueHandler({
|
||||
getMpvClient: () => appState.mpvClient,
|
||||
loadSubtitleSourceText,
|
||||
@@ -4503,13 +4206,13 @@ const { registerIpcRuntimeHandlers } = composeIpcRuntimeHandlers({
|
||||
};
|
||||
}
|
||||
|
||||
const resolvedSource = await resolveActiveSubtitleSidebarSource(
|
||||
const resolvedSource = await resolveActiveSubtitleSidebarSourceHandler({
|
||||
currentExternalFilenameRaw,
|
||||
currentTrackRaw,
|
||||
trackListRaw,
|
||||
sidRaw,
|
||||
videoPath,
|
||||
);
|
||||
});
|
||||
if (!resolvedSource) {
|
||||
return {
|
||||
cues: appState.activeParsedSubtitleCues,
|
||||
@@ -4670,7 +4373,7 @@ const { handleCliCommand, handleInitialArgs } = composeCliStartupHandlers({
|
||||
runJellyfinCommand: (argsFromCommand: CliArgs) => runJellyfinCommand(argsFromCommand),
|
||||
runStatsCommand: (argsFromCommand: CliArgs, source: CliCommandSource) =>
|
||||
runStatsCliCommand(argsFromCommand, source),
|
||||
runYoutubePlaybackFlow: (request) => runYoutubePlaybackFlowMain(request),
|
||||
runYoutubePlaybackFlow: (request) => youtubePlaybackRuntime.runYoutubePlaybackFlow(request),
|
||||
openYomitanSettings: () => openYomitanSettings(),
|
||||
cycleSecondarySubMode: () => handleCycleSecondarySubMode(),
|
||||
openRuntimeOptionsPalette: () => openRuntimeOptionsPalette(),
|
||||
|
||||
45
src/main/runtime/autoplay-ready-gate.test.ts
Normal file
45
src/main/runtime/autoplay-ready-gate.test.ts
Normal file
@@ -0,0 +1,45 @@
|
||||
import assert from 'node:assert/strict';
|
||||
import test from 'node:test';
|
||||
import { createAutoplayReadyGate } from './autoplay-ready-gate';
|
||||
|
||||
test('autoplay ready gate suppresses duplicate media signals unless forced while paused', async () => {
|
||||
const commands: Array<Array<string | boolean>> = [];
|
||||
const scheduled: Array<() => void> = [];
|
||||
|
||||
const gate = createAutoplayReadyGate({
|
||||
isAppOwnedFlowInFlight: () => false,
|
||||
getCurrentMediaPath: () => '/media/video.mkv',
|
||||
getCurrentVideoPath: () => null,
|
||||
getPlaybackPaused: () => true,
|
||||
getMpvClient: () =>
|
||||
({
|
||||
connected: true,
|
||||
requestProperty: async () => true,
|
||||
send: ({ command }: { command: Array<string | boolean> }) => {
|
||||
commands.push(command);
|
||||
},
|
||||
}) as never,
|
||||
signalPluginAutoplayReady: () => {
|
||||
commands.push(['script-message', 'subminer-autoplay-ready']);
|
||||
},
|
||||
schedule: (callback) => {
|
||||
scheduled.push(callback);
|
||||
return 1 as never;
|
||||
},
|
||||
logDebug: () => {},
|
||||
});
|
||||
|
||||
gate.maybeSignalPluginAutoplayReady({ text: '字幕', tokens: null });
|
||||
gate.maybeSignalPluginAutoplayReady({ text: '字幕', tokens: null });
|
||||
gate.maybeSignalPluginAutoplayReady({ text: '字幕', tokens: null }, { forceWhilePaused: true });
|
||||
|
||||
await new Promise((resolve) => setTimeout(resolve, 0));
|
||||
|
||||
assert.deepEqual(commands.slice(0, 3), [
|
||||
['script-message', 'subminer-autoplay-ready'],
|
||||
['script-message', 'subminer-autoplay-ready'],
|
||||
['script-message', 'subminer-autoplay-ready'],
|
||||
]);
|
||||
assert.ok(commands.some((command) => command[0] === 'set_property' && command[1] === 'pause'));
|
||||
assert.equal(scheduled.length > 0, true);
|
||||
});
|
||||
129
src/main/runtime/autoplay-ready-gate.ts
Normal file
129
src/main/runtime/autoplay-ready-gate.ts
Normal file
@@ -0,0 +1,129 @@
|
||||
import type { SubtitleData } from '../../types';
|
||||
import { resolveAutoplayReadyMaxReleaseAttempts } from './startup-autoplay-release-policy';
|
||||
|
||||
type MpvClientLike = {
|
||||
connected?: boolean;
|
||||
requestProperty: (property: string) => Promise<unknown>;
|
||||
send: (payload: { command: Array<string | boolean> }) => void;
|
||||
};
|
||||
|
||||
export type AutoplayReadyGateDeps = {
|
||||
isAppOwnedFlowInFlight: () => boolean;
|
||||
getCurrentMediaPath: () => string | null;
|
||||
getCurrentVideoPath: () => string | null;
|
||||
getPlaybackPaused: () => boolean | null;
|
||||
getMpvClient: () => MpvClientLike | null;
|
||||
signalPluginAutoplayReady: () => void;
|
||||
schedule: (callback: () => void, delayMs: number) => ReturnType<typeof setTimeout>;
|
||||
logDebug: (message: string) => void;
|
||||
};
|
||||
|
||||
export function createAutoplayReadyGate(deps: AutoplayReadyGateDeps) {
|
||||
let autoPlayReadySignalMediaPath: string | null = null;
|
||||
let autoPlayReadySignalGeneration = 0;
|
||||
|
||||
const invalidatePendingAutoplayReadyFallbacks = (): void => {
|
||||
autoPlayReadySignalMediaPath = null;
|
||||
autoPlayReadySignalGeneration += 1;
|
||||
};
|
||||
|
||||
const maybeSignalPluginAutoplayReady = (
|
||||
payload: SubtitleData,
|
||||
options?: { forceWhilePaused?: boolean },
|
||||
): void => {
|
||||
if (deps.isAppOwnedFlowInFlight()) {
|
||||
deps.logDebug('[autoplay-ready] suppressed while app-owned YouTube flow is active');
|
||||
return;
|
||||
}
|
||||
if (!payload.text.trim()) {
|
||||
return;
|
||||
}
|
||||
|
||||
const mediaPath =
|
||||
deps.getCurrentMediaPath()?.trim() ||
|
||||
deps.getCurrentVideoPath()?.trim() ||
|
||||
'__unknown__';
|
||||
const duplicateMediaSignal = autoPlayReadySignalMediaPath === mediaPath;
|
||||
const allowDuplicateWhilePaused =
|
||||
options?.forceWhilePaused === true && deps.getPlaybackPaused() !== false;
|
||||
if (duplicateMediaSignal && !allowDuplicateWhilePaused) {
|
||||
return;
|
||||
}
|
||||
|
||||
if (duplicateMediaSignal && allowDuplicateWhilePaused) {
|
||||
deps.signalPluginAutoplayReady();
|
||||
return;
|
||||
}
|
||||
|
||||
autoPlayReadySignalMediaPath = mediaPath;
|
||||
const playbackGeneration = ++autoPlayReadySignalGeneration;
|
||||
deps.signalPluginAutoplayReady();
|
||||
|
||||
const releaseRetryDelayMs = 200;
|
||||
const maxReleaseAttempts = resolveAutoplayReadyMaxReleaseAttempts({
|
||||
forceWhilePaused: options?.forceWhilePaused === true,
|
||||
retryDelayMs: releaseRetryDelayMs,
|
||||
});
|
||||
|
||||
const isPlaybackPaused = async (client: MpvClientLike): Promise<boolean> => {
|
||||
try {
|
||||
const pauseProperty = await client.requestProperty('pause');
|
||||
if (typeof pauseProperty === 'boolean') {
|
||||
return pauseProperty;
|
||||
}
|
||||
if (typeof pauseProperty === 'string') {
|
||||
return pauseProperty.toLowerCase() !== 'no' && pauseProperty !== '0';
|
||||
}
|
||||
if (typeof pauseProperty === 'number') {
|
||||
return pauseProperty !== 0;
|
||||
}
|
||||
} catch (error) {
|
||||
deps.logDebug(
|
||||
`[autoplay-ready] failed to read pause property for media ${mediaPath}: ${
|
||||
error instanceof Error ? error.message : String(error)
|
||||
}`,
|
||||
);
|
||||
}
|
||||
|
||||
return true;
|
||||
};
|
||||
|
||||
const attemptRelease = (attempt: number): void => {
|
||||
void (async () => {
|
||||
if (
|
||||
autoPlayReadySignalMediaPath !== mediaPath ||
|
||||
playbackGeneration !== autoPlayReadySignalGeneration
|
||||
) {
|
||||
return;
|
||||
}
|
||||
|
||||
const mpvClient = deps.getMpvClient();
|
||||
if (!mpvClient?.connected) {
|
||||
if (attempt < maxReleaseAttempts) {
|
||||
deps.schedule(() => attemptRelease(attempt + 1), releaseRetryDelayMs);
|
||||
}
|
||||
return;
|
||||
}
|
||||
|
||||
const shouldUnpause = await isPlaybackPaused(mpvClient);
|
||||
if (!shouldUnpause) {
|
||||
return;
|
||||
}
|
||||
|
||||
deps.signalPluginAutoplayReady();
|
||||
mpvClient.send({ command: ['set_property', 'pause', false] });
|
||||
if (attempt < maxReleaseAttempts) {
|
||||
deps.schedule(() => attemptRelease(attempt + 1), releaseRetryDelayMs);
|
||||
}
|
||||
})();
|
||||
};
|
||||
|
||||
attemptRelease(0);
|
||||
};
|
||||
|
||||
return {
|
||||
getAutoPlayReadySignalMediaPath: (): string | null => autoPlayReadySignalMediaPath,
|
||||
invalidatePendingAutoplayReadyFallbacks,
|
||||
maybeSignalPluginAutoplayReady,
|
||||
};
|
||||
}
|
||||
@@ -9,5 +9,8 @@ export * from './jellyfin-remote-composer';
|
||||
export * from './jellyfin-runtime-composer';
|
||||
export * from './mpv-runtime-composer';
|
||||
export * from './overlay-window-composer';
|
||||
export * from './overlay-visibility-runtime-composer';
|
||||
export * from './shortcuts-runtime-composer';
|
||||
export * from './stats-startup-composer';
|
||||
export * from './subtitle-prefetch-runtime-composer';
|
||||
export * from './startup-lifecycle-composer';
|
||||
|
||||
@@ -0,0 +1,37 @@
|
||||
import assert from 'node:assert/strict';
|
||||
import test from 'node:test';
|
||||
import { composeOverlayVisibilityRuntime } from './overlay-visibility-runtime-composer';
|
||||
|
||||
test('composeOverlayVisibilityRuntime returns overlay visibility handlers', () => {
|
||||
const composed = composeOverlayVisibilityRuntime({
|
||||
overlayVisibilityRuntime: {
|
||||
updateVisibleOverlayVisibility: () => {},
|
||||
},
|
||||
restorePreviousSecondarySubVisibilityMainDeps: {
|
||||
getMpvClient: () => null,
|
||||
},
|
||||
broadcastRuntimeOptionsChangedMainDeps: {
|
||||
broadcastRuntimeOptionsChangedRuntime: () => {},
|
||||
getRuntimeOptionsState: () => [],
|
||||
broadcastToOverlayWindows: () => {},
|
||||
},
|
||||
sendToActiveOverlayWindowMainDeps: {
|
||||
sendToActiveOverlayWindowRuntime: () => true,
|
||||
},
|
||||
setOverlayDebugVisualizationEnabledMainDeps: {
|
||||
setOverlayDebugVisualizationEnabledRuntime: () => {},
|
||||
getCurrentEnabled: () => false,
|
||||
setCurrentEnabled: () => {},
|
||||
},
|
||||
openRuntimeOptionsPaletteMainDeps: {
|
||||
openRuntimeOptionsPaletteRuntime: () => {},
|
||||
},
|
||||
});
|
||||
|
||||
assert.equal(typeof composed.updateVisibleOverlayVisibility, 'function');
|
||||
assert.equal(typeof composed.restorePreviousSecondarySubVisibility, 'function');
|
||||
assert.equal(typeof composed.broadcastRuntimeOptionsChanged, 'function');
|
||||
assert.equal(typeof composed.sendToActiveOverlayWindow, 'function');
|
||||
assert.equal(typeof composed.setOverlayDebugVisualizationEnabled, 'function');
|
||||
assert.equal(typeof composed.openRuntimeOptionsPalette, 'function');
|
||||
});
|
||||
@@ -0,0 +1,88 @@
|
||||
import {
|
||||
createBroadcastRuntimeOptionsChangedHandler,
|
||||
createOpenRuntimeOptionsPaletteHandler,
|
||||
createRestorePreviousSecondarySubVisibilityHandler,
|
||||
createSendToActiveOverlayWindowHandler,
|
||||
createSetOverlayDebugVisualizationEnabledHandler,
|
||||
} from '../overlay-runtime-main-actions';
|
||||
import {
|
||||
createBuildBroadcastRuntimeOptionsChangedMainDepsHandler,
|
||||
createBuildOpenRuntimeOptionsPaletteMainDepsHandler,
|
||||
createBuildRestorePreviousSecondarySubVisibilityMainDepsHandler,
|
||||
createBuildSendToActiveOverlayWindowMainDepsHandler,
|
||||
createBuildSetOverlayDebugVisualizationEnabledMainDepsHandler,
|
||||
} from '../overlay-runtime-main-actions-main-deps';
|
||||
import type { ComposerInputs, ComposerOutputs } from './contracts';
|
||||
|
||||
type RestorePreviousSecondarySubVisibilityMainDeps = Parameters<
|
||||
typeof createBuildRestorePreviousSecondarySubVisibilityMainDepsHandler
|
||||
>[0];
|
||||
type BroadcastRuntimeOptionsChangedMainDeps = Parameters<
|
||||
typeof createBuildBroadcastRuntimeOptionsChangedMainDepsHandler
|
||||
>[0];
|
||||
type SendToActiveOverlayWindowMainDeps = Parameters<
|
||||
typeof createBuildSendToActiveOverlayWindowMainDepsHandler
|
||||
>[0];
|
||||
type SetOverlayDebugVisualizationEnabledMainDeps = Parameters<
|
||||
typeof createBuildSetOverlayDebugVisualizationEnabledMainDepsHandler
|
||||
>[0];
|
||||
type OpenRuntimeOptionsPaletteMainDeps = Parameters<
|
||||
typeof createBuildOpenRuntimeOptionsPaletteMainDepsHandler
|
||||
>[0];
|
||||
|
||||
export type OverlayVisibilityRuntimeComposerOptions = ComposerInputs<{
|
||||
overlayVisibilityRuntime: {
|
||||
updateVisibleOverlayVisibility: () => void;
|
||||
};
|
||||
restorePreviousSecondarySubVisibilityMainDeps: RestorePreviousSecondarySubVisibilityMainDeps;
|
||||
broadcastRuntimeOptionsChangedMainDeps: BroadcastRuntimeOptionsChangedMainDeps;
|
||||
sendToActiveOverlayWindowMainDeps: SendToActiveOverlayWindowMainDeps;
|
||||
setOverlayDebugVisualizationEnabledMainDeps: SetOverlayDebugVisualizationEnabledMainDeps;
|
||||
openRuntimeOptionsPaletteMainDeps: OpenRuntimeOptionsPaletteMainDeps;
|
||||
}>;
|
||||
|
||||
export type OverlayVisibilityRuntimeComposerResult = ComposerOutputs<{
|
||||
updateVisibleOverlayVisibility: () => void;
|
||||
restorePreviousSecondarySubVisibility: ReturnType<
|
||||
typeof createRestorePreviousSecondarySubVisibilityHandler
|
||||
>;
|
||||
broadcastRuntimeOptionsChanged: ReturnType<typeof createBroadcastRuntimeOptionsChangedHandler>;
|
||||
sendToActiveOverlayWindow: ReturnType<typeof createSendToActiveOverlayWindowHandler>;
|
||||
setOverlayDebugVisualizationEnabled: ReturnType<
|
||||
typeof createSetOverlayDebugVisualizationEnabledHandler
|
||||
>;
|
||||
openRuntimeOptionsPalette: ReturnType<typeof createOpenRuntimeOptionsPaletteHandler>;
|
||||
}>;
|
||||
|
||||
export function composeOverlayVisibilityRuntime(
|
||||
options: OverlayVisibilityRuntimeComposerOptions,
|
||||
): OverlayVisibilityRuntimeComposerResult {
|
||||
return {
|
||||
updateVisibleOverlayVisibility: () => options.overlayVisibilityRuntime.updateVisibleOverlayVisibility(),
|
||||
restorePreviousSecondarySubVisibility: createRestorePreviousSecondarySubVisibilityHandler(
|
||||
createBuildRestorePreviousSecondarySubVisibilityMainDepsHandler(
|
||||
options.restorePreviousSecondarySubVisibilityMainDeps,
|
||||
)(),
|
||||
),
|
||||
broadcastRuntimeOptionsChanged: createBroadcastRuntimeOptionsChangedHandler(
|
||||
createBuildBroadcastRuntimeOptionsChangedMainDepsHandler(
|
||||
options.broadcastRuntimeOptionsChangedMainDeps,
|
||||
)(),
|
||||
),
|
||||
sendToActiveOverlayWindow: createSendToActiveOverlayWindowHandler(
|
||||
createBuildSendToActiveOverlayWindowMainDepsHandler(
|
||||
options.sendToActiveOverlayWindowMainDeps,
|
||||
)(),
|
||||
),
|
||||
setOverlayDebugVisualizationEnabled: createSetOverlayDebugVisualizationEnabledHandler(
|
||||
createBuildSetOverlayDebugVisualizationEnabledMainDepsHandler(
|
||||
options.setOverlayDebugVisualizationEnabledMainDeps,
|
||||
)(),
|
||||
),
|
||||
openRuntimeOptionsPalette: createOpenRuntimeOptionsPaletteHandler(
|
||||
createBuildOpenRuntimeOptionsPaletteMainDepsHandler(
|
||||
options.openRuntimeOptionsPaletteMainDeps,
|
||||
)(),
|
||||
),
|
||||
};
|
||||
}
|
||||
23
src/main/runtime/composers/stats-startup-composer.test.ts
Normal file
23
src/main/runtime/composers/stats-startup-composer.test.ts
Normal file
@@ -0,0 +1,23 @@
|
||||
import assert from 'node:assert/strict';
|
||||
import test from 'node:test';
|
||||
import { composeStatsStartupRuntime } from './stats-startup-composer';
|
||||
|
||||
test('composeStatsStartupRuntime returns stats startup handlers', async () => {
|
||||
const composed = composeStatsStartupRuntime({
|
||||
ensureStatsServerStarted: () => 'http://127.0.0.1:8766',
|
||||
ensureBackgroundStatsServerStarted: () => ({
|
||||
url: 'http://127.0.0.1:8766',
|
||||
runningInCurrentProcess: true,
|
||||
}),
|
||||
stopBackgroundStatsServer: async () => ({ ok: true, stale: false }),
|
||||
ensureImmersionTrackerStarted: () => {},
|
||||
});
|
||||
|
||||
assert.equal(composed.ensureStatsServerStarted(), 'http://127.0.0.1:8766');
|
||||
assert.deepEqual(composed.ensureBackgroundStatsServerStarted(), {
|
||||
url: 'http://127.0.0.1:8766',
|
||||
runningInCurrentProcess: true,
|
||||
});
|
||||
assert.deepEqual(await composed.stopBackgroundStatsServer(), { ok: true, stale: false });
|
||||
assert.equal(typeof composed.ensureImmersionTrackerStarted, 'function');
|
||||
});
|
||||
26
src/main/runtime/composers/stats-startup-composer.ts
Normal file
26
src/main/runtime/composers/stats-startup-composer.ts
Normal file
@@ -0,0 +1,26 @@
|
||||
import type { ComposerInputs, ComposerOutputs } from './contracts';
|
||||
|
||||
type BackgroundStatsStartResult = {
|
||||
url: string;
|
||||
runningInCurrentProcess: boolean;
|
||||
};
|
||||
|
||||
type BackgroundStatsStopResult = {
|
||||
ok: boolean;
|
||||
stale: boolean;
|
||||
};
|
||||
|
||||
export type StatsStartupComposerOptions = ComposerInputs<{
|
||||
ensureStatsServerStarted: () => string;
|
||||
ensureBackgroundStatsServerStarted: () => BackgroundStatsStartResult;
|
||||
stopBackgroundStatsServer: () => Promise<BackgroundStatsStopResult> | BackgroundStatsStopResult;
|
||||
ensureImmersionTrackerStarted: () => void;
|
||||
}>;
|
||||
|
||||
export type StatsStartupComposerResult = ComposerOutputs<StatsStartupComposerOptions>;
|
||||
|
||||
export function composeStatsStartupRuntime(
|
||||
options: StatsStartupComposerOptions,
|
||||
): StatsStartupComposerResult {
|
||||
return options;
|
||||
}
|
||||
@@ -0,0 +1,23 @@
|
||||
import assert from 'node:assert/strict';
|
||||
import test from 'node:test';
|
||||
import { composeSubtitlePrefetchRuntime } from './subtitle-prefetch-runtime-composer';
|
||||
|
||||
test('composeSubtitlePrefetchRuntime returns subtitle prefetch runtime helpers', () => {
|
||||
const composed = composeSubtitlePrefetchRuntime({
|
||||
subtitlePrefetchInitController: {
|
||||
cancelPendingInit: () => {},
|
||||
initSubtitlePrefetch: async () => {},
|
||||
},
|
||||
refreshSubtitleSidebarFromSource: async () => {},
|
||||
refreshSubtitlePrefetchFromActiveTrack: async () => {},
|
||||
scheduleSubtitlePrefetchRefresh: () => {},
|
||||
clearScheduledSubtitlePrefetchRefresh: () => {},
|
||||
});
|
||||
|
||||
assert.equal(typeof composed.cancelPendingInit, 'function');
|
||||
assert.equal(typeof composed.initSubtitlePrefetch, 'function');
|
||||
assert.equal(typeof composed.refreshSubtitleSidebarFromSource, 'function');
|
||||
assert.equal(typeof composed.refreshSubtitlePrefetchFromActiveTrack, 'function');
|
||||
assert.equal(typeof composed.scheduleSubtitlePrefetchRefresh, 'function');
|
||||
assert.equal(typeof composed.clearScheduledSubtitlePrefetchRefresh, 'function');
|
||||
});
|
||||
@@ -0,0 +1,32 @@
|
||||
import type { SubtitlePrefetchInitController } from '../subtitle-prefetch-init';
|
||||
import type { ComposerInputs, ComposerOutputs } from './contracts';
|
||||
|
||||
export type SubtitlePrefetchRuntimeComposerOptions = ComposerInputs<{
|
||||
subtitlePrefetchInitController: SubtitlePrefetchInitController;
|
||||
refreshSubtitleSidebarFromSource: (sourcePath: string) => Promise<void>;
|
||||
refreshSubtitlePrefetchFromActiveTrack: () => Promise<void>;
|
||||
scheduleSubtitlePrefetchRefresh: (delayMs?: number) => void;
|
||||
clearScheduledSubtitlePrefetchRefresh: () => void;
|
||||
}>;
|
||||
|
||||
export type SubtitlePrefetchRuntimeComposerResult = ComposerOutputs<{
|
||||
cancelPendingInit: () => void;
|
||||
initSubtitlePrefetch: SubtitlePrefetchInitController['initSubtitlePrefetch'];
|
||||
refreshSubtitleSidebarFromSource: (sourcePath: string) => Promise<void>;
|
||||
refreshSubtitlePrefetchFromActiveTrack: () => Promise<void>;
|
||||
scheduleSubtitlePrefetchRefresh: (delayMs?: number) => void;
|
||||
clearScheduledSubtitlePrefetchRefresh: () => void;
|
||||
}>;
|
||||
|
||||
export function composeSubtitlePrefetchRuntime(
|
||||
options: SubtitlePrefetchRuntimeComposerOptions,
|
||||
): SubtitlePrefetchRuntimeComposerResult {
|
||||
return {
|
||||
cancelPendingInit: () => options.subtitlePrefetchInitController.cancelPendingInit(),
|
||||
initSubtitlePrefetch: options.subtitlePrefetchInitController.initSubtitlePrefetch,
|
||||
refreshSubtitleSidebarFromSource: options.refreshSubtitleSidebarFromSource,
|
||||
refreshSubtitlePrefetchFromActiveTrack: options.refreshSubtitlePrefetchFromActiveTrack,
|
||||
scheduleSubtitlePrefetchRefresh: options.scheduleSubtitlePrefetchRefresh,
|
||||
clearScheduledSubtitlePrefetchRefresh: options.clearScheduledSubtitlePrefetchRefresh,
|
||||
};
|
||||
}
|
||||
76
src/main/runtime/discord-presence-runtime.test.ts
Normal file
76
src/main/runtime/discord-presence-runtime.test.ts
Normal file
@@ -0,0 +1,76 @@
|
||||
import assert from 'node:assert/strict';
|
||||
import test from 'node:test';
|
||||
import { createDiscordPresenceRuntime } from './discord-presence-runtime';
|
||||
|
||||
test('discord presence runtime refreshes duration and publishes the current snapshot', async () => {
|
||||
const snapshots: Array<Record<string, unknown>> = [];
|
||||
let mediaDurationSec: number | null = null;
|
||||
|
||||
const runtime = createDiscordPresenceRuntime({
|
||||
getDiscordPresenceService: () => ({
|
||||
publish: (snapshot: Record<string, unknown>) => {
|
||||
snapshots.push(snapshot);
|
||||
},
|
||||
}),
|
||||
isDiscordPresenceEnabled: () => true,
|
||||
getMpvClient: () =>
|
||||
({
|
||||
connected: true,
|
||||
currentTimePos: 12,
|
||||
requestProperty: async (name: string) => {
|
||||
assert.equal(name, 'duration');
|
||||
return 42;
|
||||
},
|
||||
}) as never,
|
||||
getCurrentMediaTitle: () => 'Episode 1',
|
||||
getCurrentMediaPath: () => '/media/episode-1.mkv',
|
||||
getCurrentSubtitleText: () => '字幕',
|
||||
getPlaybackPaused: () => false,
|
||||
getFallbackMediaDurationSec: () => 90,
|
||||
getSessionStartedAtMs: () => 1_000,
|
||||
getMediaDurationSec: () => mediaDurationSec,
|
||||
setMediaDurationSec: (next) => {
|
||||
mediaDurationSec = next;
|
||||
},
|
||||
});
|
||||
|
||||
await runtime.refreshDiscordPresenceMediaDuration();
|
||||
runtime.publishDiscordPresence();
|
||||
|
||||
assert.equal(mediaDurationSec, 42);
|
||||
assert.deepEqual(snapshots, [
|
||||
{
|
||||
mediaTitle: 'Episode 1',
|
||||
mediaPath: '/media/episode-1.mkv',
|
||||
subtitleText: '字幕',
|
||||
currentTimeSec: 12,
|
||||
mediaDurationSec: 42,
|
||||
paused: false,
|
||||
connected: true,
|
||||
sessionStartedAtMs: 1_000,
|
||||
},
|
||||
]);
|
||||
});
|
||||
|
||||
test('discord presence runtime skips publish when disabled or service missing', () => {
|
||||
let published = false;
|
||||
const runtime = createDiscordPresenceRuntime({
|
||||
getDiscordPresenceService: () => null,
|
||||
isDiscordPresenceEnabled: () => false,
|
||||
getMpvClient: () => null,
|
||||
getCurrentMediaTitle: () => null,
|
||||
getCurrentMediaPath: () => null,
|
||||
getCurrentSubtitleText: () => '',
|
||||
getPlaybackPaused: () => null,
|
||||
getFallbackMediaDurationSec: () => null,
|
||||
getSessionStartedAtMs: () => 0,
|
||||
getMediaDurationSec: () => null,
|
||||
setMediaDurationSec: () => {
|
||||
published = true;
|
||||
},
|
||||
});
|
||||
|
||||
runtime.publishDiscordPresence();
|
||||
|
||||
assert.equal(published, false);
|
||||
});
|
||||
new file: src/main/runtime/discord-presence-runtime.ts (74 lines)
@@ -0,0 +1,74 @@
|
||||
type DiscordPresenceServiceLike = {
|
||||
publish: (snapshot: {
|
||||
mediaTitle: string | null;
|
||||
mediaPath: string | null;
|
||||
subtitleText: string;
|
||||
currentTimeSec: number | null;
|
||||
mediaDurationSec: number | null;
|
||||
paused: boolean | null;
|
||||
connected: boolean;
|
||||
sessionStartedAtMs: number;
|
||||
}) => void;
|
||||
};
|
||||
|
||||
type MpvClientLike = {
|
||||
connected?: boolean;
|
||||
currentTimePos?: number | null;
|
||||
requestProperty: (name: string) => Promise<unknown>;
|
||||
};
|
||||
|
||||
export type DiscordPresenceRuntimeDeps = {
|
||||
getDiscordPresenceService: () => DiscordPresenceServiceLike | null;
|
||||
isDiscordPresenceEnabled: () => boolean;
|
||||
getMpvClient: () => MpvClientLike | null;
|
||||
getCurrentMediaTitle: () => string | null;
|
||||
getCurrentMediaPath: () => string | null;
|
||||
getCurrentSubtitleText: () => string;
|
||||
getPlaybackPaused: () => boolean | null;
|
||||
getFallbackMediaDurationSec: () => number | null;
|
||||
getSessionStartedAtMs: () => number;
|
||||
getMediaDurationSec: () => number | null;
|
||||
setMediaDurationSec: (durationSec: number | null) => void;
|
||||
};
|
||||
|
||||
export function createDiscordPresenceRuntime(deps: DiscordPresenceRuntimeDeps) {
|
||||
const refreshDiscordPresenceMediaDuration = async (): Promise<void> => {
|
||||
const client = deps.getMpvClient();
|
||||
if (!client?.connected) {
|
||||
return;
|
||||
}
|
||||
|
||||
try {
|
||||
const value = await client.requestProperty('duration');
|
||||
const numeric = Number(value);
|
||||
deps.setMediaDurationSec(Number.isFinite(numeric) && numeric > 0 ? numeric : null);
|
||||
} catch {
|
||||
deps.setMediaDurationSec(null);
|
||||
}
|
||||
};
|
||||
|
||||
const publishDiscordPresence = (): void => {
|
||||
const discordPresenceService = deps.getDiscordPresenceService();
|
||||
if (!discordPresenceService || deps.isDiscordPresenceEnabled() !== true) {
|
||||
return;
|
||||
}
|
||||
|
||||
void refreshDiscordPresenceMediaDuration();
|
||||
const client = deps.getMpvClient();
|
||||
discordPresenceService.publish({
|
||||
mediaTitle: deps.getCurrentMediaTitle(),
|
||||
mediaPath: deps.getCurrentMediaPath(),
|
||||
subtitleText: deps.getCurrentSubtitleText(),
|
||||
currentTimeSec: client?.currentTimePos ?? null,
|
||||
mediaDurationSec: deps.getMediaDurationSec() ?? deps.getFallbackMediaDurationSec(),
|
||||
paused: deps.getPlaybackPaused(),
|
||||
connected: Boolean(client?.connected),
|
||||
sessionStartedAtMs: deps.getSessionStartedAtMs(),
|
||||
});
|
||||
};
|
||||
|
||||
return {
|
||||
refreshDiscordPresenceMediaDuration,
|
||||
publishDiscordPresence,
|
||||
};
|
||||
}
|
||||
new file: src/main/runtime/overlay-modal-input-state.test.ts (87 lines)
@@ -0,0 +1,87 @@
|
||||
import assert from 'node:assert/strict';
|
||||
import test from 'node:test';
|
||||
import { createOverlayModalInputState } from './overlay-modal-input-state';
|
||||
|
||||
function createModalWindow() {
|
||||
const calls: string[] = [];
|
||||
let destroyed = false;
|
||||
let focused = false;
|
||||
let webContentsFocused = false;
|
||||
|
||||
return {
|
||||
calls,
|
||||
setDestroyed(next: boolean) {
|
||||
destroyed = next;
|
||||
},
|
||||
setFocused(next: boolean) {
|
||||
focused = next;
|
||||
},
|
||||
setWebContentsFocused(next: boolean) {
|
||||
webContentsFocused = next;
|
||||
},
|
||||
isDestroyed: () => destroyed,
|
||||
setIgnoreMouseEvents: (ignore: boolean) => {
|
||||
calls.push(`ignore:${ignore}`);
|
||||
},
|
||||
setAlwaysOnTop: (flag: boolean, level?: string, relativeLevel?: number) => {
|
||||
calls.push(`top:${flag}:${level ?? ''}:${relativeLevel ?? ''}`);
|
||||
},
|
||||
focus: () => {
|
||||
focused = true;
|
||||
calls.push('focus');
|
||||
},
|
||||
isFocused: () => focused,
|
||||
webContents: {
|
||||
isFocused: () => webContentsFocused,
|
||||
focus: () => {
|
||||
webContentsFocused = true;
|
||||
calls.push('web-focus');
|
||||
},
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
test('overlay modal input state activates modal window interactivity and syncs dependents', () => {
|
||||
const modalWindow = createModalWindow();
|
||||
const calls: string[] = [];
|
||||
const state = createOverlayModalInputState({
|
||||
getModalWindow: () => modalWindow as never,
|
||||
syncOverlayShortcutsForModal: (isActive) => {
|
||||
calls.push(`shortcuts:${isActive}`);
|
||||
},
|
||||
syncOverlayVisibilityForModal: () => {
|
||||
calls.push('visibility');
|
||||
},
|
||||
});
|
||||
|
||||
state.handleModalInputStateChange(true);
|
||||
|
||||
assert.equal(state.getModalInputExclusive(), true);
|
||||
assert.deepEqual(modalWindow.calls, [
|
||||
'ignore:false',
|
||||
'top:true:screen-saver:1',
|
||||
'focus',
|
||||
'web-focus',
|
||||
]);
|
||||
assert.deepEqual(calls, ['shortcuts:true', 'visibility']);
|
||||
});
|
||||
|
||||
test('overlay modal input state is idempotent for unchanged state', () => {
|
||||
const calls: string[] = [];
|
||||
const state = createOverlayModalInputState({
|
||||
getModalWindow: () => null,
|
||||
syncOverlayShortcutsForModal: (isActive) => {
|
||||
calls.push(`shortcuts:${isActive}`);
|
||||
},
|
||||
syncOverlayVisibilityForModal: () => {
|
||||
calls.push('visibility');
|
||||
},
|
||||
});
|
||||
|
||||
state.handleModalInputStateChange(false);
|
||||
state.handleModalInputStateChange(true);
|
||||
state.handleModalInputStateChange(true);
|
||||
|
||||
assert.equal(state.getModalInputExclusive(), true);
|
||||
assert.deepEqual(calls, ['shortcuts:true', 'visibility']);
|
||||
});
|
||||
new file: src/main/runtime/overlay-modal-input-state.ts (38 lines)
@@ -0,0 +1,38 @@
|
||||
import type { BrowserWindow } from 'electron';
|
||||
|
||||
export type OverlayModalInputStateDeps = {
|
||||
getModalWindow: () => BrowserWindow | null;
|
||||
syncOverlayShortcutsForModal: (isActive: boolean) => void;
|
||||
syncOverlayVisibilityForModal: () => void;
|
||||
};
|
||||
|
||||
export function createOverlayModalInputState(deps: OverlayModalInputStateDeps) {
|
||||
let modalInputExclusive = false;
|
||||
|
||||
const handleModalInputStateChange = (isActive: boolean): void => {
|
||||
if (modalInputExclusive === isActive) {
|
||||
return;
|
||||
}
|
||||
|
||||
modalInputExclusive = isActive;
|
||||
if (isActive) {
|
||||
const modalWindow = deps.getModalWindow();
|
||||
if (modalWindow && !modalWindow.isDestroyed()) {
|
||||
modalWindow.setIgnoreMouseEvents(false);
|
||||
modalWindow.setAlwaysOnTop(true, 'screen-saver', 1);
|
||||
modalWindow.focus();
|
||||
if (!modalWindow.webContents.isFocused()) {
|
||||
modalWindow.webContents.focus();
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
deps.syncOverlayShortcutsForModal(isActive);
|
||||
deps.syncOverlayVisibilityForModal();
|
||||
};
|
||||
|
||||
return {
|
||||
getModalInputExclusive: (): boolean => modalInputExclusive,
|
||||
handleModalInputStateChange,
|
||||
};
|
||||
}
|
||||
new file: src/main/runtime/subtitle-prefetch-runtime.test.ts (59 lines)
@@ -0,0 +1,59 @@
|
||||
import assert from 'node:assert/strict';
|
||||
import test from 'node:test';
|
||||
import { createResolveActiveSubtitleSidebarSourceHandler } from './subtitle-prefetch-runtime';
|
||||
|
||||
test('subtitle prefetch runtime resolves direct external subtitle sources first', async () => {
|
||||
const resolveSource = createResolveActiveSubtitleSidebarSourceHandler({
|
||||
getFfmpegPath: () => 'ffmpeg',
|
||||
extractInternalSubtitleTrack: async () => {
|
||||
throw new Error('should not extract external tracks');
|
||||
},
|
||||
});
|
||||
|
||||
const resolved = await resolveSource({
|
||||
currentExternalFilenameRaw: ' /tmp/current.ass ',
|
||||
currentTrackRaw: null,
|
||||
trackListRaw: null,
|
||||
sidRaw: null,
|
||||
videoPath: '/media/video.mkv',
|
||||
});
|
||||
|
||||
assert.deepEqual(resolved, {
|
||||
path: '/tmp/current.ass',
|
||||
sourceKey: '/tmp/current.ass',
|
||||
});
|
||||
});
|
||||
|
||||
test('subtitle prefetch runtime extracts internal subtitle tracks into a stable source key', async () => {
|
||||
const resolveSource = createResolveActiveSubtitleSidebarSourceHandler({
|
||||
getFfmpegPath: () => 'ffmpeg-custom',
|
||||
extractInternalSubtitleTrack: async (ffmpegPath, videoPath, track) => {
|
||||
assert.equal(ffmpegPath, 'ffmpeg-custom');
|
||||
assert.equal(videoPath, '/media/video.mkv');
|
||||
assert.equal((track as Record<string, unknown>)['ff-index'], 7);
|
||||
return {
|
||||
path: '/tmp/subminer-sidebar-123/track_7.ass',
|
||||
cleanup: async () => {},
|
||||
};
|
||||
},
|
||||
});
|
||||
|
||||
const resolved = await resolveSource({
|
||||
currentExternalFilenameRaw: null,
|
||||
currentTrackRaw: {
|
||||
type: 'sub',
|
||||
id: 3,
|
||||
'ff-index': 7,
|
||||
codec: 'ass',
|
||||
},
|
||||
trackListRaw: [],
|
||||
sidRaw: 3,
|
||||
videoPath: '/media/video.mkv',
|
||||
});
|
||||
|
||||
assert.deepEqual(resolved, {
|
||||
path: '/tmp/subminer-sidebar-123/track_7.ass',
|
||||
sourceKey: 'internal:/media/video.mkv:track:3:ff:7',
|
||||
cleanup: resolved?.cleanup,
|
||||
});
|
||||
});
|
||||
new file: src/main/runtime/subtitle-prefetch-runtime.ts (180 lines)
@@ -0,0 +1,180 @@
|
||||
import type { SubtitlePrefetchInitController } from './subtitle-prefetch-init';
|
||||
import { buildSubtitleSidebarSourceKey } from './subtitle-prefetch-source';
|
||||
|
||||
type MpvSubtitleTrackLike = {
|
||||
type?: unknown;
|
||||
id?: unknown;
|
||||
selected?: unknown;
|
||||
external?: unknown;
|
||||
codec?: unknown;
|
||||
'ff-index'?: unknown;
|
||||
'external-filename'?: unknown;
|
||||
};
|
||||
|
||||
type ActiveSubtitleSidebarSource = {
|
||||
path: string;
|
||||
sourceKey: string;
|
||||
cleanup?: () => Promise<void>;
|
||||
};
|
||||
|
||||
function parseTrackId(value: unknown): number | null {
|
||||
if (typeof value === 'number' && Number.isInteger(value)) {
|
||||
return value;
|
||||
}
|
||||
if (typeof value === 'string') {
|
||||
const parsed = Number(value.trim());
|
||||
return Number.isInteger(parsed) ? parsed : null;
|
||||
}
|
||||
return null;
|
||||
}
|
||||
|
||||
function getActiveSubtitleTrack(
|
||||
currentTrackRaw: unknown,
|
||||
trackListRaw: unknown,
|
||||
sidRaw: unknown,
|
||||
): MpvSubtitleTrackLike | null {
|
||||
if (currentTrackRaw && typeof currentTrackRaw === 'object') {
|
||||
const track = currentTrackRaw as MpvSubtitleTrackLike;
|
||||
if (track.type === undefined || track.type === 'sub') {
|
||||
return track;
|
||||
}
|
||||
}
|
||||
|
||||
const sid = parseTrackId(sidRaw);
|
||||
if (!Array.isArray(trackListRaw)) {
|
||||
return null;
|
||||
}
|
||||
|
||||
const bySid =
|
||||
sid === null
|
||||
? null
|
||||
: ((trackListRaw.find((entry: unknown) => {
|
||||
if (!entry || typeof entry !== 'object') {
|
||||
return false;
|
||||
}
|
||||
const track = entry as MpvSubtitleTrackLike;
|
||||
return track.type === 'sub' && parseTrackId(track.id) === sid;
|
||||
}) as MpvSubtitleTrackLike | undefined) ?? null);
|
||||
if (bySid) {
|
||||
return bySid;
|
||||
}
|
||||
|
||||
return (
|
||||
(trackListRaw.find((entry: unknown) => {
|
||||
if (!entry || typeof entry !== 'object') {
|
||||
return false;
|
||||
}
|
||||
const track = entry as MpvSubtitleTrackLike;
|
||||
return track.type === 'sub' && track.selected === true;
|
||||
}) as MpvSubtitleTrackLike | undefined) ?? null
|
||||
);
|
||||
}
|
||||
|
||||
export function createResolveActiveSubtitleSidebarSourceHandler(deps: {
|
||||
getFfmpegPath: () => string;
|
||||
extractInternalSubtitleTrack: (
|
||||
ffmpegPath: string,
|
||||
videoPath: string,
|
||||
track: MpvSubtitleTrackLike,
|
||||
) => Promise<{ path: string; cleanup: () => Promise<void> } | null>;
|
||||
}) {
|
||||
return async (input: {
|
||||
currentExternalFilenameRaw: unknown;
|
||||
currentTrackRaw: unknown;
|
||||
trackListRaw: unknown;
|
||||
sidRaw: unknown;
|
||||
videoPath: string;
|
||||
}): Promise<ActiveSubtitleSidebarSource | null> => {
|
||||
const currentExternalFilename =
|
||||
typeof input.currentExternalFilenameRaw === 'string'
|
||||
? input.currentExternalFilenameRaw.trim()
|
||||
: '';
|
||||
if (currentExternalFilename) {
|
||||
return { path: currentExternalFilename, sourceKey: currentExternalFilename };
|
||||
}
|
||||
|
||||
const track = getActiveSubtitleTrack(input.currentTrackRaw, input.trackListRaw, input.sidRaw);
|
||||
if (!track) {
|
||||
return null;
|
||||
}
|
||||
|
||||
const externalFilename =
|
||||
typeof track['external-filename'] === 'string' ? track['external-filename'].trim() : '';
|
||||
if (externalFilename) {
|
||||
return { path: externalFilename, sourceKey: externalFilename };
|
||||
}
|
||||
|
||||
const extracted = await deps.extractInternalSubtitleTrack(
|
||||
deps.getFfmpegPath(),
|
||||
input.videoPath,
|
||||
track,
|
||||
);
|
||||
if (!extracted) {
|
||||
return null;
|
||||
}
|
||||
|
||||
return {
|
||||
...extracted,
|
||||
sourceKey: buildSubtitleSidebarSourceKey(input.videoPath, track, extracted.path),
|
||||
};
|
||||
};
|
||||
}
|
||||
|
||||
export function createRefreshSubtitlePrefetchFromActiveTrackHandler(deps: {
|
||||
getMpvClient: () => {
|
||||
connected?: boolean;
|
||||
requestProperty: (name: string) => Promise<unknown>;
|
||||
} | null;
|
||||
getLastObservedTimePos: () => number;
|
||||
subtitlePrefetchInitController: SubtitlePrefetchInitController;
|
||||
resolveActiveSubtitleSidebarSource: (
|
||||
input: Parameters<ReturnType<typeof createResolveActiveSubtitleSidebarSourceHandler>>[0],
|
||||
) => Promise<ActiveSubtitleSidebarSource | null>;
|
||||
}) {
|
||||
return async (): Promise<void> => {
|
||||
const client = deps.getMpvClient();
|
||||
if (!client?.connected) {
|
||||
return;
|
||||
}
|
||||
|
||||
try {
|
||||
const [currentExternalFilenameRaw, currentTrackRaw, trackListRaw, sidRaw, videoPathRaw] =
|
||||
await Promise.all([
|
||||
client.requestProperty('current-tracks/sub/external-filename').catch(() => null),
|
||||
client.requestProperty('current-tracks/sub').catch(() => null),
|
||||
client.requestProperty('track-list'),
|
||||
client.requestProperty('sid'),
|
||||
client.requestProperty('path'),
|
||||
]);
|
||||
const videoPath = typeof videoPathRaw === 'string' ? videoPathRaw : '';
|
||||
if (!videoPath) {
|
||||
deps.subtitlePrefetchInitController.cancelPendingInit();
|
||||
return;
|
||||
}
|
||||
|
||||
const resolvedSource = await deps.resolveActiveSubtitleSidebarSource({
|
||||
currentExternalFilenameRaw,
|
||||
currentTrackRaw,
|
||||
trackListRaw,
|
||||
sidRaw,
|
||||
videoPath,
|
||||
});
|
||||
if (!resolvedSource) {
|
||||
deps.subtitlePrefetchInitController.cancelPendingInit();
|
||||
return;
|
||||
}
|
||||
|
||||
try {
|
||||
await deps.subtitlePrefetchInitController.initSubtitlePrefetch(
|
||||
resolvedSource.path,
|
||||
deps.getLastObservedTimePos(),
|
||||
resolvedSource.sourceKey,
|
||||
);
|
||||
} finally {
|
||||
await resolvedSource.cleanup?.();
|
||||
}
|
||||
} catch {
|
||||
// Skip refresh when the track query fails.
|
||||
}
|
||||
};
|
||||
}
|
||||
new file: src/main/runtime/youtube-playback-runtime.test.ts (80 lines)
@@ -0,0 +1,80 @@
|
||||
import assert from 'node:assert/strict';
|
||||
import test from 'node:test';
|
||||
import { createYoutubePlaybackRuntime } from './youtube-playback-runtime';
|
||||
|
||||
test('youtube playback runtime resets flow ownership after a successful run', async () => {
|
||||
const calls: string[] = [];
|
||||
let appOwnedFlowInFlight = false;
|
||||
let timeoutCallback: (() => void) | null = null;
|
||||
|
||||
const runtime = createYoutubePlaybackRuntime({
|
||||
platform: 'linux',
|
||||
directPlaybackFormat: 'best',
|
||||
mpvYtdlFormat: 'bestvideo+bestaudio',
|
||||
autoLaunchTimeoutMs: 2_000,
|
||||
connectTimeoutMs: 1_000,
|
||||
socketPath: '/tmp/mpv.sock',
|
||||
getMpvConnected: () => true,
|
||||
invalidatePendingAutoplayReadyFallbacks: () => {
|
||||
calls.push('invalidate-autoplay');
|
||||
},
|
||||
setAppOwnedFlowInFlight: (next) => {
|
||||
appOwnedFlowInFlight = next;
|
||||
calls.push(`app-owned:${next}`);
|
||||
},
|
||||
ensureYoutubePlaybackRuntimeReady: async () => {
|
||||
calls.push('ensure-runtime-ready');
|
||||
},
|
||||
resolveYoutubePlaybackUrl: async () => {
|
||||
throw new Error('linux path should not resolve direct playback url');
|
||||
},
|
||||
launchWindowsMpv: () => ({ ok: false }),
|
||||
waitForYoutubeMpvConnected: async (timeoutMs) => {
|
||||
calls.push(`wait-connected:${timeoutMs}`);
|
||||
return true;
|
||||
},
|
||||
prepareYoutubePlaybackInMpv: async ({ url }) => {
|
||||
calls.push(`prepare:${url}`);
|
||||
return true;
|
||||
},
|
||||
runYoutubePlaybackFlow: async ({ url, mode }) => {
|
||||
calls.push(`run-flow:${url}:${mode}`);
|
||||
},
|
||||
logInfo: (message) => {
|
||||
calls.push(`info:${message}`);
|
||||
},
|
||||
logWarn: (message) => {
|
||||
calls.push(`warn:${message}`);
|
||||
},
|
||||
schedule: (callback) => {
|
||||
timeoutCallback = callback;
|
||||
calls.push('schedule-arm');
|
||||
return 1 as never;
|
||||
},
|
||||
clearScheduled: () => {
|
||||
calls.push('clear-scheduled');
|
||||
},
|
||||
});
|
||||
|
||||
await runtime.runYoutubePlaybackFlow({
|
||||
url: 'https://youtu.be/demo',
|
||||
mode: 'download',
|
||||
source: 'initial',
|
||||
});
|
||||
|
||||
assert.equal(appOwnedFlowInFlight, false);
|
||||
assert.equal(runtime.getQuitOnDisconnectArmed(), false);
|
||||
assert.deepEqual(calls.slice(0, 6), [
|
||||
'invalidate-autoplay',
|
||||
'app-owned:true',
|
||||
'ensure-runtime-ready',
|
||||
'wait-connected:1000',
|
||||
'schedule-arm',
|
||||
'prepare:https://youtu.be/demo',
|
||||
]);
|
||||
|
||||
assert.ok(timeoutCallback);
|
||||
const scheduledCallback = timeoutCallback as () => void;
|
||||
scheduledCallback();
|
||||
assert.equal(runtime.getQuitOnDisconnectArmed(), true);
|
||||
});
|
||||
new file: src/main/runtime/youtube-playback-runtime.ts (149 lines)
@@ -0,0 +1,149 @@
|
||||
import type { CliArgs, CliCommandSource } from '../../cli/args';
|
||||
|
||||
type LaunchResult = {
|
||||
ok: boolean;
|
||||
mpvPath?: string;
|
||||
};
|
||||
|
||||
export type YoutubePlaybackRuntimeDeps = {
|
||||
platform: NodeJS.Platform;
|
||||
directPlaybackFormat: string;
|
||||
mpvYtdlFormat: string;
|
||||
autoLaunchTimeoutMs: number;
|
||||
connectTimeoutMs: number;
|
||||
socketPath: string;
|
||||
getMpvConnected: () => boolean;
|
||||
invalidatePendingAutoplayReadyFallbacks: () => void;
|
||||
setAppOwnedFlowInFlight: (next: boolean) => void;
|
||||
ensureYoutubePlaybackRuntimeReady: () => Promise<void>;
|
||||
resolveYoutubePlaybackUrl: (url: string, format: string) => Promise<string>;
|
||||
launchWindowsMpv: (playbackUrl: string, args: string[]) => LaunchResult;
|
||||
waitForYoutubeMpvConnected: (timeoutMs: number) => Promise<boolean>;
|
||||
prepareYoutubePlaybackInMpv: (request: { url: string }) => Promise<boolean>;
|
||||
runYoutubePlaybackFlow: (request: {
|
||||
url: string;
|
||||
mode: NonNullable<CliArgs['youtubeMode']>;
|
||||
}) => Promise<void>;
|
||||
logInfo: (message: string) => void;
|
||||
logWarn: (message: string) => void;
|
||||
schedule: (callback: () => void, delayMs: number) => ReturnType<typeof setTimeout>;
|
||||
clearScheduled: (timer: ReturnType<typeof setTimeout>) => void;
|
||||
};
|
||||
|
||||
export function createYoutubePlaybackRuntime(deps: YoutubePlaybackRuntimeDeps) {
|
||||
let quitOnDisconnectArmed = false;
|
||||
let quitOnDisconnectArmTimer: ReturnType<typeof setTimeout> | null = null;
|
||||
let playbackFlowGeneration = 0;
|
||||
|
||||
const clearYoutubePlayQuitOnDisconnectArmTimer = (): void => {
|
||||
if (quitOnDisconnectArmTimer) {
|
||||
deps.clearScheduled(quitOnDisconnectArmTimer);
|
||||
quitOnDisconnectArmTimer = null;
|
||||
}
|
||||
};
|
||||
|
||||
const runYoutubePlaybackFlow = async (request: {
|
||||
url: string;
|
||||
mode: NonNullable<CliArgs['youtubeMode']>;
|
||||
source: CliCommandSource;
|
||||
}): Promise<void> => {
|
||||
const flowGeneration = ++playbackFlowGeneration;
|
||||
deps.invalidatePendingAutoplayReadyFallbacks();
|
||||
deps.setAppOwnedFlowInFlight(true);
|
||||
let flowCompleted = false;
|
||||
|
||||
try {
|
||||
clearYoutubePlayQuitOnDisconnectArmTimer();
|
||||
quitOnDisconnectArmed = false;
|
||||
await deps.ensureYoutubePlaybackRuntimeReady();
|
||||
|
||||
let playbackUrl = request.url;
|
||||
let launchedWindowsMpv = false;
|
||||
if (deps.platform === 'win32') {
|
||||
try {
|
||||
playbackUrl = await deps.resolveYoutubePlaybackUrl(
|
||||
request.url,
|
||||
deps.directPlaybackFormat,
|
||||
);
|
||||
deps.logInfo('Resolved direct YouTube playback URL for Windows MPV startup.');
|
||||
} catch (error) {
|
||||
deps.logWarn(
|
||||
`Failed to resolve direct YouTube playback URL; falling back to page URL: ${
|
||||
error instanceof Error ? error.message : String(error)
|
||||
}`,
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
if (deps.platform === 'win32' && !deps.getMpvConnected()) {
|
||||
const launchResult = deps.launchWindowsMpv(playbackUrl, [
|
||||
'--pause=yes',
|
||||
'--ytdl=yes',
|
||||
`--ytdl-format=${deps.mpvYtdlFormat}`,
|
||||
'--sub-auto=no',
|
||||
'--sub-file-paths=.;subs;subtitles',
|
||||
'--sid=auto',
|
||||
'--secondary-sid=auto',
|
||||
'--secondary-sub-visibility=no',
|
||||
'--alang=ja,jp,jpn,japanese,en,eng,english,enus,en-us',
|
||||
'--slang=ja,jp,jpn,japanese,en,eng,english,enus,en-us',
|
||||
`--input-ipc-server=${deps.socketPath}`,
|
||||
]);
|
||||
launchedWindowsMpv = launchResult.ok;
|
||||
if (launchResult.ok && launchResult.mpvPath) {
|
||||
deps.logInfo(`Bootstrapping Windows mpv for YouTube playback via ${launchResult.mpvPath}`);
|
||||
}
|
||||
if (!launchResult.ok) {
|
||||
deps.logWarn('Unable to bootstrap Windows mpv for YouTube playback.');
|
||||
}
|
||||
}
|
||||
|
||||
const connected = await deps.waitForYoutubeMpvConnected(
|
||||
launchedWindowsMpv ? deps.autoLaunchTimeoutMs : deps.connectTimeoutMs,
|
||||
);
|
||||
if (!connected) {
|
||||
throw new Error(
|
||||
launchedWindowsMpv
|
||||
? 'MPV not connected after auto-launch. Ensure mpv is installed and can open the requested YouTube URL.'
|
||||
: 'MPV not connected. Start mpv with the SubMiner profile or retry after mpv finishes starting.',
|
||||
);
|
||||
}
|
||||
|
||||
if (request.source === 'initial') {
|
||||
quitOnDisconnectArmTimer = deps.schedule(() => {
|
||||
if (playbackFlowGeneration !== flowGeneration) {
|
||||
return;
|
||||
}
|
||||
quitOnDisconnectArmed = true;
|
||||
quitOnDisconnectArmTimer = null;
|
||||
}, 3000);
|
||||
}
|
||||
|
||||
const mediaReady = await deps.prepareYoutubePlaybackInMpv({ url: playbackUrl });
|
||||
if (!mediaReady) {
|
||||
throw new Error('Timed out waiting for mpv to load the requested YouTube URL.');
|
||||
}
|
||||
|
||||
await deps.runYoutubePlaybackFlow({
|
||||
url: request.url,
|
||||
mode: request.mode,
|
||||
});
|
||||
flowCompleted = true;
|
||||
deps.logInfo(`YouTube playback flow completed from ${request.source}.`);
|
||||
} finally {
|
||||
if (playbackFlowGeneration === flowGeneration) {
|
||||
if (!flowCompleted) {
|
||||
clearYoutubePlayQuitOnDisconnectArmTimer();
|
||||
quitOnDisconnectArmed = false;
|
||||
}
|
||||
deps.setAppOwnedFlowInFlight(false);
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
return {
|
||||
clearYoutubePlayQuitOnDisconnectArmTimer,
|
||||
getQuitOnDisconnectArmed: (): boolean => quitOnDisconnectArmed,
|
||||
runYoutubePlaybackFlow,
|
||||
};
|
||||
}
|
||||
Reference in New Issue
Block a user