24 Commits

Author SHA1 Message Date
ba9bae63e4 test: address latest review feedback 2026-03-22 20:28:45 -07:00
415c758840 test: force x11 backend in launcher ci harness 2026-03-22 20:19:06 -07:00
ff72976bae test: stub launcher youtube deps in failing case 2026-03-22 20:17:07 -07:00
0def04b09c test: isolate launcher youtube flow deps 2026-03-22 20:14:53 -07:00
6bf148514e test: stub launcher youtube deps in CI 2026-03-22 20:12:59 -07:00
07b91f8704 style: format stats library files 2026-03-22 20:10:23 -07:00
d8a7ae77b0 fix: address latest review feedback 2026-03-22 20:09:16 -07:00
809b57af44 style: format stats library tab 2026-03-22 19:40:28 -07:00
ef716b82c7 fix: persist canonical title from youtube metadata 2026-03-22 19:40:12 -07:00
d65575c80d fix: address CodeRabbit review feedback 2026-03-22 19:37:49 -07:00
8da3a26855 fix(ci): add changelog fragment for immersion changes 2026-03-22 19:07:07 -07:00
8928bfdf7e chore: add shared log-file source for diagnostics 2026-03-22 18:38:58 -07:00
16f7b2507b feat: update subtitle sidebar overlay behavior 2026-03-22 18:38:56 -07:00
7d8d2ae7a7 refactor: unify cli and runtime wiring for startup and youtube flow 2026-03-22 18:38:54 -07:00
3fb33af116 docs: update docs for youtube subtitle and mining flow 2026-03-22 18:38:51 -07:00
8ddace5536 fix: unwrap mpv youtube streams for anki media mining 2026-03-22 18:34:38 -07:00
e7242d006f fix: align youtube playback with shared overlay startup 2026-03-22 18:34:25 -07:00
7666a094f4 fix: harden preload argv parsing for popup windows 2026-03-22 18:34:16 -07:00
0317c7f011 docs: add WebSocket & Texthooker API integration guide (#30) 2026-03-22 02:48:54 -07:00
13797b5005 docs: align v0.8.0 release notes with subtitle sidebar changes 2026-03-22 00:07:05 -07:00
b24d9d7487 fix(release): make changelog build idempotent for re-run tagged releases 2026-03-21 23:50:27 -07:00
3a01cffc6b feat(subtitle-sidebar): add sidebar config surface (#28) 2026-03-21 23:37:42 -07:00
eddf6f0456 docs: document release changelog recovery path 2026-03-20 03:15:05 -07:00
f6c024d61e fix: build changelog artifacts in release job 2026-03-20 03:00:40 -07:00
174 changed files with 13359 additions and 1000 deletions

View File

@@ -334,6 +334,14 @@ jobs:
id: version id: version
run: echo "VERSION=${GITHUB_REF#refs/tags/}" >> "$GITHUB_OUTPUT" run: echo "VERSION=${GITHUB_REF#refs/tags/}" >> "$GITHUB_OUTPUT"
- name: Build changelog artifacts for release
run: |
if find changes -maxdepth 1 -name '*.md' -not -name README.md -print -quit | grep -q .; then
bun run changelog:build --version "${{ steps.version.outputs.VERSION }}"
else
echo "No pending changelog fragments found."
fi
- name: Verify changelog is ready for tagged release - name: Verify changelog is ready for tagged release
run: bun run changelog:check --version "${{ steps.version.outputs.VERSION }}" run: bun run changelog:check --version "${{ steps.version.outputs.VERSION }}"

View File

@@ -1,5 +1,31 @@
# Changelog # Changelog
## v0.9.0 (2026-03-22)
### Changed
- Subtitle Sidebar: Added subtitle sidebar state and behavior updates, including startup-auto-open controls and resume positioning improvements.
- Subtitle Sidebar: Fixed subtitle prefetch and embedded overlay passthrough sync between sidebar and overlay subtitle rendering.
- Launcher: Added an app-owned YouTube subtitle picker flow that boots mpv paused, opens an overlay track picker, and downloads selected subtitles into external files.
- Launcher: Added explicit `download` and `generate` YouTube subtitle modes with `download` as the default path.
- Launcher: Disabled mpv native YouTube subtitle auto-loading for the app-owned flow so external subtitle files stay authoritative.
- Launcher: Added OSD status messages for YouTube playback startup, subtitle acquisition, and subtitle loading so the flow stays visible before and during the picker.
## v0.8.0 (2026-03-22)
### Added
- Overlay: Added the subtitle sidebar feature with a new `subtitleSidebar` configuration surface and rendered sidebar modal with cue list rendering, click-to-seek, active-cue highlighting, and embedded layout support.
- IPC: Added sidebar snapshot plumbing between renderer and main process for overlay/sidebar synchronization.
### Changed
- Config: Added hot-reloadable sidebar options for enablement, layout, visibility, typography, opacity, sizing, and interaction behavior (`autoOpen`, `pauseOnHover`, `autoScroll`, toggle key).
- Docs: Added full `subtitleSidebar` documentation coverage, including sample config, option table, and toggle shortcut notes.
- Runtime: Improved subtitle prefetch/rendering flow so sidebar and overlay subtitle states stay in sync across media transitions.
### Fixed
- Overlay: Kept sidebar cue tracking stable across playback transitions and timing edge cases.
- Overlay: Improved sidebar resume/start behavior to jump directly to the first resolved active cue.
- Overlay: Stopped stale subtitle refreshes from regressing active-cue and text state.
## v0.7.0 (2026-03-19) ## v0.7.0 (2026-03-19)
### Added ### Added

216
README.md
View File

@@ -1,60 +1,159 @@
<div align="center"> <div align="center">
<img src="assets/SubMiner.png" width="140" alt="SubMiner logo">
<img src="assets/SubMiner.png" width="160" alt="SubMiner logo">
# SubMiner # SubMiner
**Sentence-mine from mpv — look up words, one-key Anki export, immersion tracking.** ## Turn mpv into a sentence-mining workstation.
[![License: GPL v3](https://img.shields.io/badge/License-GPLv3-blue.svg)](https://www.gnu.org/licenses/gpl-3.0) Look up words with Yomitan, export to Anki in one key, track your immersion — all without leaving mpv.
[![Linux](https://img.shields.io/badge/platform-Linux%20%7C%20macOS%20%7C%20Windows-informational)](https://github.com/ksyasuda/SubMiner)
[![Docs](https://img.shields.io/badge/docs-docs.subminer.moe-blueviolet)](https://docs.subminer.moe) [![License: GPL v3](https://img.shields.io/badge/license-GPLv3-1a1a2e?style=flat-square)](https://www.gnu.org/licenses/gpl-3.0)
[![AUR](https://img.shields.io/aur/version/subminer-bin)](https://aur.archlinux.org/packages/subminer-bin) [![Platform](https://img.shields.io/badge/platform-Linux%20·%20macOS%20·%20Windows-1a1a2e?style=flat-square)](https://github.com/ksyasuda/SubMiner)
[![Docs](https://img.shields.io/badge/docs-docs.subminer.moe-e6a817?style=flat-square)](https://docs.subminer.moe)
[![AUR](https://img.shields.io/aur/version/subminer-bin?style=flat-square&color=1a1a2e)](https://aur.archlinux.org/packages/subminer-bin)
[![SubMiner demo](./assets/minecard.webp)](./assets/minecard.mp4)
</div> </div>
--- ## How It Works
SubMiner is an Electron overlay for [mpv](https://mpv.io) that turns video into a sentence-mining workstation. Look up any word with [Yomitan](https://github.com/yomidevs/yomitan), mine it to Anki with one key, and track your immersion over time. SubMiner runs as an invisible Electron overlay on top of mpv. Subtitles render as an interactive layer. Move your cursor over any word and trigger a [Yomitan](https://github.com/yomidevs/yomitan) lookup. Press one key to snapshot the sentence, audio, and screenshot into Anki via AnkiConnect.
<div align="center">
[![SubMiner demo (Animated preview)](./assets/minecard.webp)](./assets/minecard.mp4)
</div>
## Features ## Features
**Dictionary lookups** — Yomitan runs inside the overlay. Hover or navigate to any word for full dictionary popups without leaving mpv. ### Dictionary Lookups
**One-key Anki mining** — Press one key to create a card with the sentence, audio clip, screenshot, and machine translation from the exact playback moment. Yomitan runs inside the overlay. Trigger a lookup on any word for full dictionary popups — definitions, pitch accent, frequency data — without ever leaving mpv.
<div align="center"> <div align="center">
<img src="docs-site/public/screenshots/yomitan-lookup.png" width="800" alt="Yomitan popup with dictionary entry and mine button over annotated subtitles in mpv"> <img src="docs-site/public/screenshots/yomitan-lookup.png" width="800" alt="Yomitan dictionary popup over annotated subtitles in mpv">
</div> </div>
**Reading annotations** — Real-time subtitle annotations with N+1 targeting, frequency highlighting, JLPT tags, and a character name dictionary. Grammar-only tokens render as plain text. <br>
### Instant Anki Mining
Create an Anki card with the sentence, audio clip, screenshot, and machine translation from the exact playback moment with one key press, click, or controller input.
<div align="center"> <div align="center">
<img src="docs-site/public/screenshots/annotations.png" width="800" alt="Annotated subtitles with frequency highlighting, JLPT underlines, known words, and N+1 targets"> <img src="docs-site/public/screenshots/one-key-mining.png" width="800" alt="Anki card created from SubMiner with sentence, audio, and screenshot">
</div> </div>
**Immersion dashboard** — Local stats dashboard with watch time, anime progress, vocabulary growth, mining throughput, and session history. <br>
### Reading Annotations
Real-time subtitle annotations with frequency highlighting, JLPT tags, N+1 targeting, and a character name dictionary. Known words fade back; new words stand out. Grammar-only tokens render as plain text so you focus on what matters.
<div align="center"> <div align="center">
<img src="docs-site/public/screenshots/stats-overview.png" width="800" alt="Stats dashboard with watch time, cards mined, streaks, and tracking snapshot"> <img src="docs-site/public/screenshots/annotations.png" width="800" alt="Annotated subtitles with frequency coloring, JLPT underlines, and N+1 targets">
</div> </div>
**Integrations** — AniList episode tracking, Jellyfin remote playback, Jimaku subtitle downloads, alass/ffsubsync, and an annotated websocket feed for external clients. <br>
### Immersion Dashboard
Local stats dashboard — watch time, anime library, vocabulary growth, mining throughput, session history, and trends. All stored locally, no third-party tracking.
<div align="center"> <div align="center">
<img src="docs-site/public/screenshots/texthooker.png" width="800" alt="Texthooker page with annotated subtitle lines and frequency highlighting"> <img src="docs-site/public/screenshots/stats-overview.png" width="800" alt="Stats dashboard showing watch time, cards mined, streaks, and tracking data">
</div> </div>
<br>
### Integrations
<table>
<tr>
<td><b>AniList</b></td>
<td>Automatic episode tracking and progress sync</td>
</tr>
<tr>
<td><b>Jellyfin</b></td>
<td>Browse and launch media from your Jellyfin server</td>
</tr>
<tr>
<td><b>Jimaku</b></td>
<td>Search and download Japanese subtitles</td>
</tr>
<tr>
<td><b>alass / ffsubsync</b></td>
<td>Automatic subtitle retiming</td>
</tr>
<tr>
<td><b>WebSocket</b></td>
<td>Annotated subtitle feed for external clients (texthooker pages, custom tools)</td>
</tr>
</table>
<div align="center">
<img src="docs-site/public/screenshots/texthooker.png" width="800" alt="Texthooker page receiving annotated subtitle lines via WebSocket">
</div>
<br>
---
## Requirements
| | Required | Optional |
| -------------- | --------------------------------------- | -------------------------------------- |
| **Player** | [`mpv`](https://mpv.io) with IPC socket | — |
| **Processing** | `ffmpeg`, `mecab` + `mecab-ipadic` | `guessit` (AniSkip) |
| **Media** | — | `yt-dlp`, `chafa`, `ffmpegthumbnailer` |
| **Selection** | — | `fzf` / `rofi` |
> [!NOTE]
> [`bun`](https://bun.sh) is required if building from source or using the CLI wrapper: `subminer`. Pre-built releases (AppImage, DMG, installer) do not require it.
**Platform-specific:**
| Linux | macOS | Windows |
| ----------------------------------- | ------------------------ | ------------- |
| `hyprctl` or `xdotool` + `xwininfo` | Accessibility permission | No extra deps |
<details>
<summary><b>Arch Linux</b></summary>
```bash
paru -S --needed mpv ffmpeg mecab-git mecab-ipadic
# Optional
paru -S --needed yt-dlp fzf rofi chafa ffmpegthumbnailer xdotool xorg-xwininfo
# X11 / XWAYLAND
paru -S --needed xdotool xorg-xwininfo
```
</details>
<details>
<summary><b>macOS</b></summary>
```bash
brew install mpv ffmpeg mecab mecab-ipadic
# Optional
brew install yt-dlp fzf rofi chafa ffmpegthumbnailer
```
Grant Accessibility permission to SubMiner in **System Settings > Privacy & Security > Accessibility**.
</details>
<details>
<summary><b>Windows</b></summary>
Install [`mpv`](https://mpv.io/installation/) and [`ffmpeg`](https://ffmpeg.org/download.html) and ensure both are on your `PATH`.
For MeCab, install [MeCab for Windows](https://taku910.github.io/mecab/#download) with the UTF-8 dictionary.
</details>
--- ---
## Quick Start ## Quick Start
### Install ### 1. Install
<details> <details>
<summary><b>Arch Linux (AUR)</b></summary> <summary><b>Arch Linux (AUR)</b></summary>
@@ -88,53 +187,62 @@ wget https://github.com/ksyasuda/SubMiner/releases/latest/download/subminer -O ~
</details> </details>
<details> <details>
<summary><b>macOS / Windows / From source</b></summary> <summary><b>macOS</b></summary>
**macOS** — Download the latest DMG/ZIP from [GitHub Releases](https://github.com/ksyasuda/SubMiner/releases/latest) and drag `SubMiner.app` into `/Applications`. Download the latest DMG or ZIP from [GitHub Releases](https://github.com/ksyasuda/SubMiner/releases/latest) and drag `SubMiner.app` into `/Applications`.
**Windows** — Download the latest installer or portable `.zip` from [GitHub Releases](https://github.com/ksyasuda/SubMiner/releases/latest). Keep `mpv` on `PATH`.
**From source** — See [docs.subminer.moe/installation#from-source](https://docs.subminer.moe/installation#from-source).
</details> </details>
### First Launch <details>
<summary><b>Windows</b></summary>
Run `SubMiner.AppImage` (Linux), `SubMiner.app` (macOS), or `SubMiner.exe` (Windows). On first launch, SubMiner starts in the tray, creates a default config, and opens a setup popup where you can install the mpv plugin and configure Yomitan dictionaries. Download the latest installer or portable `.zip` from [GitHub Releases](https://github.com/ksyasuda/SubMiner/releases/latest). Make sure `mpv` is on your `PATH`.
### Mine </details>
<details>
<summary><b>From source</b></summary>
See the [build-from-source guide](https://docs.subminer.moe/installation#from-source).
</details>
### 2. First Launch
Run the app. On first launch SubMiner starts in the system tray, creates a default config, and opens a setup popup to install the mpv plugin and configure Yomitan dictionaries.
### 3. Mine
```bash ```bash
subminer video.mkv # auto-starts overlay + resumes playback subminer video.mkv # play video with overlay
subminer --start video.mkv # explicit overlay start (if plugin auto_start=no) subminer --start video.mkv # explicit overlay start
subminer stats # open the immersion dashboard subminer stats # open immersion dashboard
subminer stats -b # keep the stats daemon running in background subminer stats -b # stats daemon in background
subminer stats -s # stop the dedicated stats daemon
subminer stats cleanup # repair/prune stored stats vocabulary rows
``` ```
--- ---
## Requirements
| Required | Optional |
| ------------------------------------------------------ | ----------------------------- |
| [`mpv`](https://mpv.io) with IPC socket | `yt-dlp` |
| `ffmpeg` | `guessit` (AniSkip detection) |
| `mecab` + `mecab-ipadic` | `fzf` / `rofi` |
| [`bun`](https://bun.sh) (source builds, Linux wrapper) | `chafa`, `ffmpegthumbnailer` |
| Linux: `hyprctl` or `xdotool` + `xwininfo` | |
| macOS: Accessibility permission | |
Windows uses native window tracking and does not need the Linux compositor tools.
## Documentation ## Documentation
Full guides on configuration, Anki, Jellyfin, immersion tracking, and more at **[docs.subminer.moe](https://docs.subminer.moe)**. Full guides on configuration, Anki setup, Jellyfin, immersion tracking, and more: **[docs.subminer.moe](https://docs.subminer.moe)**
---
## Acknowledgments ## Acknowledgments
Built on [GameSentenceMiner](https://github.com/bpwhelan/GameSentenceMiner), [Renji's Texthooker Page](https://github.com/Renji-XD/texthooker-ui), [Anacreon-Script](https://github.com/friedrich-de/Anacreon-Script), and [Bee's Character Dictionary](https://github.com/bee-san/Japanese_Character_Name_Dictionary). Subtitles from [Jimaku.cc](https://jimaku.cc). Lookups via [Yomitan](https://github.com/yomidevs/yomitan). JLPT tags from [yomitan-jlpt-vocab](https://github.com/stephenmk/yomitan-jlpt-vocab). SubMiner builds on the work of these open-source projects:
| Project | Role |
| ------------------------------------------------------------------------------------------- | ----------------------------------------------------------------------- |
| [Anacreon-Script](https://github.com/friedrich-de/Anacreon-Script) | Inspiration for the mining workflow |
| [asbplayer](https://github.com/killergerbah/asbplayer) | Inspiration for subtitle sidebar and logic for YouTube subtitle parsing |
| [Bee's Character Dictionary](https://github.com/bee-san/Japanese_Character_Name_Dictionary) | Character name recognition in subtitles |
| [GameSentenceMiner](https://github.com/bpwhelan/GameSentenceMiner) | Inspiration for Electron overlay with Yomitan integration |
| [jellyfin-mpv-shim](https://github.com/jellyfin/jellyfin-mpv-shim) | Jellyfin integration |
| [Jimaku.cc](https://jimaku.cc) | Japanese subtitle search and downloads |
| [Renji's Texthooker Page](https://github.com/Renji-XD/texthooker-ui) | Base for the WebSocket texthooker integration |
| [Yomitan](https://github.com/yomidevs/yomitan) | Dictionary engine powering all lookups and the morphological parser |
| [yomitan-jlpt-vocab](https://github.com/stephenmk/yomitan-jlpt-vocab) | JLPT level tags for vocabulary |
## License ## License

View File

@@ -1,7 +1,7 @@
--- ---
id: TASK-194 id: TASK-194
title: Redesign YouTube subtitle acquisition around download-first track selection title: App-owned YouTube subtitle picker flow
status: To Do status: Done
assignee: [] assignee: []
created_date: '2026-03-18 07:52' created_date: '2026-03-18 07:52'
labels: [] labels: []
@@ -18,17 +18,16 @@ priority: medium
## Description ## Description
<!-- SECTION:DESCRIPTION:BEGIN --> <!-- SECTION:DESCRIPTION:BEGIN -->
Replace the current YouTube subtitle-generation-first flow with a download-first flow that enumerates available YouTube subtitle tracks, prompts for primary and secondary track selection before playback, downloads selected tracks into external subtitle files for mpv, and preserves generation as an explicit mode and as fallback behavior in auto mode. Keep the existing SubMiner tokenization and annotation pipeline as the downstream consumer of downloaded subtitle files. Replace the YouTube subtitle-generation-first flow with an app-owned picker flow that boots mpv paused, opens an overlay track picker, downloads selected subtitles into external subtitle files, and preserves generation as an explicit mode. Keep the existing SubMiner tokenization and annotation pipeline as the downstream consumer of downloaded subtitle files.
<!-- SECTION:DESCRIPTION:END --> <!-- SECTION:DESCRIPTION:END -->
## Acceptance Criteria ## Acceptance Criteria
<!-- AC:BEGIN --> <!-- AC:BEGIN -->
- [ ] #1 Launcher and config expose YouTube subtitle acquisition modes `download`, `generate`, and `auto`, with `download` as the default for launcher YouTube playback. - [x] #1 Launcher and app expose YouTube subtitle acquisition modes `download` and `generate`, with `download` as the default.
- [ ] #2 YouTube playback enumerates available subtitle tracks before mpv launch and presents a selection UI that supports primary and secondary subtitle choices. - [x] #2 YouTube playback boots mpv paused and presents an overlay selection UI for primary and secondary subtitle choices.
- [ ] #3 Selected YouTube subtitle tracks are downloaded to external subtitle files and loaded into mpv before playback starts when download mode succeeds. - [x] #3 Selected YouTube subtitle tracks are downloaded to external subtitle files and loaded into mpv before playback resumes.
- [ ] #4 `auto` mode attempts download-first for the selected tracks and falls back to generation only when required tracks cannot be downloaded or download fails. - [x] #4 `generate` mode preserves the existing subtitle generation path as an explicit opt-in behavior.
- [ ] #5 `generate` mode preserves the existing whisper/AI generation path as an explicit opt-in behavior. - [x] #5 Downloaded YouTube subtitle files integrate with the existing SubMiner subtitle/tokenization/annotation pipeline without regressing current overlay behavior.
- [ ] #6 Downloaded YouTube subtitle files integrate with the existing SubMiner subtitle/tokenization/annotation pipeline without regressing current overlay behavior. - [x] #6 Tests cover mode selection, subtitle-track enumeration/selection flow, and the paused bootstrap plus app handoff path.
- [ ] #7 Tests cover mode selection, subtitle-track enumeration/selection flow, download-first success path, and fallback behavior for auto mode. - [x] #7 User-facing config and launcher docs are updated to describe the new modes and default behavior.
- [ ] #8 User-facing config and launcher docs are updated to describe the new modes and default behavior.
<!-- AC:END --> <!-- AC:END -->

View File

@@ -0,0 +1,37 @@
---
id: TASK-214
title: Jump subtitle sidebar directly to resume position on first resolved cue
status: In Progress
assignee: []
created_date: '2026-03-21 11:15'
updated_date: '2026-03-21 11:15'
labels:
- bug
- ux
- overlay
- subtitles
dependencies: []
references:
- /Users/sudacode/projects/japanese/SubMiner/src/renderer/modals/subtitle-sidebar.ts
- /Users/sudacode/projects/japanese/SubMiner/src/renderer/modals/subtitle-sidebar.test.ts
priority: medium
---
## Description
<!-- SECTION:DESCRIPTION:BEGIN -->
When playback starts from a resumed timestamp while the subtitle sidebar is open, the sidebar currently smooth-scrolls from the top of the cue list to the resumed cue. Change the first resolved active-cue positioning to jump immediately to the resume location while preserving smooth auto-follow for later playback-driven cue advances.
<!-- SECTION:DESCRIPTION:END -->
## Acceptance Criteria
<!-- AC:BEGIN -->
- [x] #1 The first active cue resolved after open/resume uses an instant jump instead of smooth-scrolling through the list.
- [x] #2 Normal subtitle-sidebar auto-follow remains smooth after the first active cue has been positioned.
- [x] #3 Regression coverage distinguishes the initial jump behavior from later smooth auto-follow updates.
<!-- AC:END -->
## Implementation Notes
<!-- SECTION:NOTES:BEGIN -->
2026-03-21: Fixed by treating the first auto-scroll from `previousActiveCueIndex < 0` as `behavior: 'auto'` in the subtitle sidebar scroll helper. Added renderer regression coverage for initial jump plus later smooth follow.
<!-- SECTION:NOTES:END -->

View File

@@ -0,0 +1,40 @@
---
id: TASK-215
title: Add startup auto-open option for subtitle sidebar
status: In Progress
assignee: []
created_date: '2026-03-21 11:35'
updated_date: '2026-03-21 11:35'
labels:
- feature
- ux
- overlay
- subtitles
dependencies: []
references:
- /Users/sudacode/projects/japanese/SubMiner/src/types.ts
- /Users/sudacode/projects/japanese/SubMiner/src/config/definitions/defaults-subtitle.ts
- /Users/sudacode/projects/japanese/SubMiner/src/config/resolve/subtitle-domains.ts
- /Users/sudacode/projects/japanese/SubMiner/src/renderer/modals/subtitle-sidebar.ts
- /Users/sudacode/projects/japanese/SubMiner/src/renderer/renderer.ts
priority: medium
---
## Description
<!-- SECTION:DESCRIPTION:BEGIN -->
Add a subtitle sidebar config option that auto-opens the sidebar once during overlay startup. The option should default to `false`, only apply when the sidebar feature is enabled, and should not force the sidebar back open later in the same session after manual close or later visibility changes.
<!-- SECTION:DESCRIPTION:END -->
## Acceptance Criteria
<!-- AC:BEGIN -->
- [x] #1 `subtitleSidebar.autoOpen` is available in config with default `false`.
- [x] #2 When enabled, overlay startup opens the subtitle sidebar once after initial sidebar config/snapshot load.
- [x] #3 Regression coverage covers config resolution and startup-only auto-open behavior.
<!-- AC:END -->
## Implementation Notes
<!-- SECTION:NOTES:BEGIN -->
2026-03-21: Added `subtitleSidebar.autoOpen` to types/defaults/config registry and resolver. Renderer bootstrap now calls a startup-only subtitle sidebar helper after the initial snapshot refresh. Modal regression coverage verifies startup auto-open requires both `enabled` and `autoOpen`.
<!-- SECTION:NOTES:END -->

View File

@@ -0,0 +1,79 @@
---
id: TASK-216
title: 'Address PR #28 CodeRabbit follow-ups on subtitle sidebar'
status: Completed
assignee:
- '@codex'
created_date: '2026-03-21 00:00'
updated_date: '2026-03-21 00:00'
labels:
- pr-review
- subtitle-sidebar
- renderer
dependencies: []
references:
- src/main/runtime/subtitle-prefetch-init.ts
- src/main/runtime/subtitle-prefetch-init.test.ts
- src/renderer/handlers/mouse.ts
- src/renderer/handlers/mouse.test.ts
- src/renderer/modals/subtitle-sidebar.ts
- src/renderer/modals/subtitle-sidebar.test.ts
- src/renderer/style.css
priority: medium
---
## Description
<!-- SECTION:DESCRIPTION:BEGIN -->
Validate the CodeRabbit follow-ups on PR #28 for the subtitle sidebar workstream, implement the confirmed fixes, and verify the touched runtime and renderer paths.
<!-- SECTION:DESCRIPTION:END -->
## Acceptance Criteria
<!-- AC:BEGIN -->
- [x] #1 Review comments that described real regressions are fixed in code
- [x] #2 Focused regression coverage exists for the fixed behaviors
- [x] #3 Targeted typecheck and runtime-compat verification pass
<!-- AC:END -->
## Implementation Notes
<!-- SECTION:NOTES:BEGIN -->
Completed follow-up fixes for PR #28:
- Cleared parsed subtitle cues on subtitle prefetch init failure so stale snapshot cache entries do not survive a failed refresh.
- Treated primary and secondary subtitle containers as one hover region so moving between them does not resume playback mid-transition.
- Kept the subtitle sidebar closed when disabled, serialized snapshot polling with timeouts, made cue rows keyboard-activatable, resolved stale cue selection fallback, and resumed hover-paused playback when the modal closes.
Regression coverage added:
- `src/main/runtime/subtitle-prefetch-init.test.ts`
- `src/renderer/handlers/mouse.test.ts`
- `src/renderer/modals/subtitle-sidebar.test.ts`
Verification:
- `bun test src/main/runtime/subtitle-prefetch-init.test.ts`
- `bun test src/renderer/handlers/mouse.test.ts`
- `bun test src/renderer/modals/subtitle-sidebar.test.ts`
- `bun run typecheck`
- `bun run test:runtime:compat`
2026-03-21: Reopened to assess a newer CodeRabbit review pass on PR #28 and address any remaining valid action items before push/reply.
2026-03-21: Addressed the latest CodeRabbit follow-up pass in commit d70c6448 after rebasing onto the updated remote branch tip.
2026-03-21: Reopened for the latest CodeRabbit round on commit d70c6448; current actionable item is the invalid ctx.state.isOverSubtitleSidebar assignment in subtitle-sidebar.ts.
2026-03-22: Addressed the live hover-state and startup mouse-ignore follow-ups from the latest CodeRabbit pass. `handleMouseLeave()` now clears `isOverSubtitle` and drops `secondary-sub-hover-active` when leaving the secondary subtitle container toward the primary container, and renderer startup now calls `syncOverlayMouseIgnoreState(ctx)` instead of forcing `setIgnoreMouseEvents(true, { forward: true })`. The sidebar IPC hover catch and CSS spacing comments were already satisfied in the current tree.
2026-03-22: Regenerated `bun.lock` from a clean install so the `electron-builder-squirrel-windows` override now resolves at `26.8.2` in the lockfile alongside `app-builder-lib@26.8.2`.
2026-03-21: Finished the remaining cleanup pass from the latest review. `subtitleSidebar.layout` now uses enum validation, `SubtitleCue` is re-exported from `src/types.ts` as the single public type path, and the subtitle sidebar resize listener now has unload cleanup wired through the renderer.
<!-- SECTION:NOTES:END -->
## Final Summary
<!-- SECTION:FINAL_SUMMARY:BEGIN -->
Implemented the confirmed PR #28 CodeRabbit follow-ups for subtitle sidebar behavior and added regression coverage plus verification for the touched renderer and runtime paths.
Handled the latest CodeRabbit review pass for PR #28: accepted zero sidebar opacity, closed/inerted the sidebar when refresh sees config disabled, moved poll rescheduling out of finally, caught hover pause IPC failures, and fixed the stylelint spacing issue.
Verification: bun test src/config/resolve/subtitle-sidebar.test.ts; bun test src/renderer/modals/subtitle-sidebar.test.ts; bun test src/renderer/handlers/mouse.test.ts; bun run typecheck; bun run test:fast; bun run test:env; bun run build; SubMiner verifier lanes config + runtime-compat (including test:runtime:compat and test:smoke:dist).
<!-- SECTION:FINAL_SUMMARY:END -->

View File

@@ -0,0 +1,69 @@
---
id: TASK-217
title: Fix embedded overlay passthrough sync between subtitle and sidebar
status: Done
assignee:
- codex
created_date: '2026-03-21 23:16'
updated_date: '2026-03-21 23:28'
labels:
- bug
- overlay
- macos
dependencies: []
references:
- src/renderer/handlers/mouse.ts
- src/renderer/modals/subtitle-sidebar.ts
- src/renderer/renderer.ts
documentation:
- docs/workflow/verification.md
priority: high
---
## Description
<!-- SECTION:DESCRIPTION:BEGIN -->
On macOS, when both the subtitle overlay and embedded subtitle sidebar are visible, mouse passthrough to mpv can remain stale until the user hovers the sidebar. After closing the sidebar, passthrough can likewise remain stale until the user hovers the subtitle again. Fix the overlay input-state synchronization so passthrough reflects the current hover/open state immediately instead of relying on the last hover target.
<!-- SECTION:DESCRIPTION:END -->
## Acceptance Criteria
<!-- AC:BEGIN -->
- [x] #1 When the embedded subtitle sidebar is open and the pointer is not over subtitle or sidebar content, the overlay returns to mouse passthrough immediately without requiring a sidebar hover cycle.
- [x] #2 When transitioning between subtitle hover and sidebar hover states on macOS embedded sidebar mode, mouse ignore state stays in sync with the currently interactive region.
- [x] #3 Closing the embedded subtitle sidebar restores the correct passthrough state based on remaining subtitle hover/modal state without requiring an additional hover.
- [x] #4 Regression tests cover the passthrough synchronization behavior.
<!-- AC:END -->
## Implementation Plan
<!-- SECTION:PLAN:BEGIN -->
1. Add a shared renderer-side passthrough sync helper that derives whether the overlay should ignore mouse events from subtitle hover, embedded sidebar visibility/hover, popup visibility, and modal state.
2. Replace direct embedded-sidebar passthrough toggles in subtitle hover/sidebar handlers with calls to the shared sync helper so state is recomputed on every transition.
3. Add regression tests for macOS embedded sidebar mode covering sidebar-open idle passthrough, subtitle-to-sidebar transitions, and sidebar-close restore behavior.
4. Run targeted renderer tests for mouse/sidebar passthrough coverage, then summarize any residual risk.
<!-- SECTION:PLAN:END -->
## Implementation Notes
<!-- SECTION:NOTES:BEGIN -->
Added shared renderer overlay mouse-ignore recompute so subtitle hover, embedded sidebar hover/open/close, and popup idle transitions all derive passthrough from current state instead of last hover target.
Added regression coverage for embedded sidebar idle passthrough on subtitle leave and for sidebar-close recompute behavior.
Verification: `bun run typecheck` passed; `bun test src/renderer/handlers/mouse.test.ts` passed; `bun test src/renderer/modals/subtitle-sidebar.test.ts` passed; core verification wrapper artifact at `.tmp/skill-verification/subminer-verify-20260321-162743-XhSBxw` hit an unrelated `bun run test:fast` failure in `scripts/update-aur-package.test.ts` because macOS system bash lacks `mapfile`.
<!-- SECTION:NOTES:END -->
## Final Summary
<!-- SECTION:FINAL_SUMMARY:BEGIN -->
Fixed stale embedded-sidebar passthrough sync on macOS by introducing a shared renderer mouse-ignore recompute path and tracking sidebar-hover state separately from subtitle hover. Subtitle hover leave, sidebar hover enter/leave, sidebar open, and sidebar close now all recompute passthrough from the current overlay state instead of waiting for a later hover event to repair it. Added regression tests covering subtitle-leave passthrough while the embedded sidebar is open but idle, plus sidebar-close restore behavior based on remaining subtitle hover state.
Tests run:
- `bun run typecheck`
- `bun test src/renderer/handlers/mouse.test.ts`
- `bun test src/renderer/modals/subtitle-sidebar.test.ts`
- `bash .agents/skills/subminer-change-verification/scripts/classify_subminer_diff.sh src/renderer/state.ts src/renderer/overlay-mouse-ignore.ts src/renderer/handlers/mouse.ts src/renderer/handlers/mouse.test.ts src/renderer/modals/subtitle-sidebar.ts src/renderer/modals/subtitle-sidebar.test.ts`
- `bash .agents/skills/subminer-change-verification/scripts/verify_subminer_change.sh --lane core src/renderer/state.ts src/renderer/overlay-mouse-ignore.ts src/renderer/handlers/mouse.ts src/renderer/handlers/mouse.test.ts src/renderer/modals/subtitle-sidebar.ts src/renderer/modals/subtitle-sidebar.test.ts` (typecheck passed; `test:fast` blocked by unrelated `scripts/update-aur-package.test.ts` failure on macOS Bash 3.2 lacking `mapfile`)
Risk: the classifier flagged this as a real-runtime candidate, so actual Electron/mpv macOS pointer behavior was not exercised in a live runtime during this turn.
<!-- SECTION:FINAL_SUMMARY:END -->

508
bun.lock

File diff suppressed because it is too large Load Diff

View File

@@ -1,4 +0,0 @@
type: fixed
area: anki
- Known-word cache refreshes now reconcile Anki changes incrementally instead of wiping and rebuilding on startup, mined cards can append their word into the cache immediately through a new default-enabled config flag, and explicit refreshes now run through `subminer doctor --refresh-known-words`.

View File

@@ -1,4 +0,0 @@
type: fixed
area: subtitle
- Restored known-word coloring and JLPT underlines for subtitle tokens like `大体` when the subtitle token is kanji but the known-word cache only matches the kana reading.

View File

@@ -1,4 +0,0 @@
type: fixed
area: stats
- Episode progress in the anime page now uses the last ended playback position instead of cumulative active watch time, avoiding distorted percentages after rewatches or repeated sessions.

View File

@@ -1,4 +0,0 @@
type: fixed
area: stats
- Anime episode progress now keeps the latest known playback position through active-session checkpoints and stale-session recovery, so recently watched episodes no longer lose their progress percentage.

View File

@@ -1,4 +0,0 @@
type: changed
area: docs
- Refreshed the vendored Texthooker docs/index.html bundle to match the latest local build artifacts.

View File

@@ -1,4 +0,0 @@
type: fixed
area: stats
- Anime episode progress now falls back to the latest retained subtitle/event timing when a session is missing a persisted playback-position checkpoint, so older watch sessions no longer get stuck at `0%` progress.

View File

@@ -0,0 +1,5 @@
type: docs
area: docs
- Added a new WebSocket / Texthooker API and integration guide covering websocket payloads, custom client patterns, mpv plugin automation, and webhook-style relay examples.
- Linked the new integration guide from configuration and mining workflow docs for easier discovery.

View File

@@ -0,0 +1,5 @@
type: fixed
area: immersion
- Hardened immersion tracker storage/session/query paths with the updated YouTube metadata flow.
- Added metadata probe support for YouTube subtitle retrieval edge cases.

View File

@@ -284,6 +284,30 @@
} // Secondary setting. } // Secondary setting.
}, // Primary and secondary subtitle styling. }, // Primary and secondary subtitle styling.
// ==========================================
// Subtitle Sidebar
// Parsed-subtitle sidebar cue list styling, behavior, and toggle key.
// Hot-reload: subtitle sidebar changes apply live without restarting SubMiner.
// ==========================================
"subtitleSidebar": {
"enabled": false, // Enable the subtitle sidebar feature for parsed subtitle sources. Values: true | false
"autoOpen": false, // Automatically open the subtitle sidebar once during overlay startup. Values: true | false
"layout": "overlay", // Render the subtitle sidebar as a floating overlay or reserve space inside mpv. Values: overlay | embedded
"toggleKey": "Backslash", // KeyboardEvent.code used to toggle the subtitle sidebar open and closed.
"pauseVideoOnHover": false, // Pause mpv while hovering the subtitle sidebar, then resume on leave. Values: true | false
"autoScroll": true, // Auto-scroll the active subtitle cue into view while playback advances. Values: true | false
"maxWidth": 420, // Maximum sidebar width in CSS pixels.
"opacity": 0.95, // Base opacity applied to the sidebar shell.
"backgroundColor": "rgba(73, 77, 100, 0.9)", // Background color for the subtitle sidebar shell.
"textColor": "#cad3f5", // Default cue text color in the subtitle sidebar.
"fontFamily": "\"M PLUS 1\", \"Noto Sans CJK JP\", sans-serif", // Font family used for subtitle sidebar cue text.
"fontSize": 16, // Base font size for subtitle sidebar cue text in CSS pixels.
"timestampColor": "#a5adcb", // Timestamp color in the subtitle sidebar.
"activeLineColor": "#f5bde6", // Text color for the active subtitle cue.
"activeLineBackgroundColor": "rgba(138, 173, 244, 0.22)", // Background color for the active subtitle cue.
"hoverLineBackgroundColor": "rgba(54, 58, 79, 0.84)" // Background color for hovered subtitle cues.
}, // Parsed-subtitle sidebar cue list styling, behavior, and toggle key.
// ========================================== // ==========================================
// Shared AI Provider // Shared AI Provider
// Canonical OpenAI-compatible provider transport settings shared by Anki and YouTube subtitle fixing. // Canonical OpenAI-compatible provider transport settings shared by Anki and YouTube subtitle fixing.

View File

@@ -95,6 +95,7 @@ export default {
{ text: 'Building & Testing', link: '/development' }, { text: 'Building & Testing', link: '/development' },
{ text: 'Architecture', link: '/architecture' }, { text: 'Architecture', link: '/architecture' },
{ text: 'IPC + Runtime Contracts', link: '/ipc-contracts' }, { text: 'IPC + Runtime Contracts', link: '/ipc-contracts' },
{ text: 'WebSocket + Texthooker API', link: '/websocket-texthooker-api' },
{ text: 'Changelog', link: '/changelog' }, { text: 'Changelog', link: '/changelog' },
], ],
}, },

View File

@@ -29,7 +29,8 @@ In both modes, the enrichment workflow is the same:
4. Fills the translation field from the secondary subtitle or AI. 4. Fills the translation field from the secondary subtitle or AI.
5. Writes metadata to the miscInfo field. 5. Writes metadata to the miscInfo field.
Polling mode uses the query `"deck:<your-deck>" added:1` to find recently added cards. If no deck is configured, it searches all decks. Polling mode uses the query `"deck:<ankiConnect.deck>" added:1` to find recently added cards. If no deck is configured, it searches all decks.
Known-word sync scope is controlled by `ankiConnect.knownWords.decks` (object map), with `ankiConnect.deck` used as legacy fallback.
### Proxy Mode Setup (Yomitan / Texthooker) ### Proxy Mode Setup (Yomitan / Texthooker)

View File

@@ -1,5 +1,23 @@
# Changelog # Changelog
## v0.9.0 (2026-03-22)
- Added an app-owned YouTube subtitle picker flow that boots mpv paused, opens an overlay picker, and downloads selected subtitles into external files before playback resumes.
- Added explicit launcher/app YouTube subtitle modes `download` and `generate`, with `download` as the default path.
- Disabled mpv native YouTube subtitle auto-loading for the app-owned flow so injected external subtitle files stay authoritative.
- Added OSD status updates covering YouTube playback startup, subtitle acquisition, and subtitle loading.
- Improved sidebar startup/resume behavior and overlay/sidebar subtitle synchronization.
## v0.8.0 (2026-03-22)
- Added a configurable subtitle sidebar feature (`subtitleSidebar`) with overlay/embedded rendering, click-to-seek cue list, and hot-reloadable visibility and behavior controls.
- Added a rendered sidebar modal with cue list display, click-to-seek, active-cue highlighting, and embedded layout support.
- Added sidebar snapshot plumbing between main and renderer for overlay/sidebar synchronization.
- Added sidebar configuration options for visibility and behavior (enabled, layout, toggle key, autoOpen, pauseVideoOnHover, autoScroll) plus typography and sizing controls.
- Documented `subtitleSidebar` configuration and behavior in user-facing docs (configuration.md, shortcuts.md, config.example.jsonc).
- Updated subtitle prefetch/rendering flow to keep overlay and sidebar state in sync through media transitions.
- Kept sidebar cue tracking stable across playback transitions and timing edge cases.
- Fixed sidebar startup/resume positioning to jump directly to the first resolved active cue.
- Prevented stale subtitle refreshes from regressing active-cue state.
## v0.7.0 (2026-03-19) ## v0.7.0 (2026-03-19)
- Added a full local immersion dashboard release line with Overview, Library, Trends, Vocabulary, and Sessions drill-down views backed by SQLite tracking data. - Added a full local immersion dashboard release line with Overview, Library, Trends, Vocabulary, and Sessions drill-down views backed by SQLite tracking data.
- Added browser-first stats workflows: `subminer stats`, background stats daemon controls (`-b` / `-s`), stats cleanup, and dashboard-side mining actions with media enrichment. - Added browser-first stats workflows: `subminer stats`, background stats daemon controls (`-b` / `-s`), stats cleanup, and dashboard-side mining actions with media enrichment.

View File

@@ -17,6 +17,11 @@ For most users, start with this minimal configuration:
"ankiConnect": { "ankiConnect": {
"enabled": true, "enabled": true,
"deck": "YourDeckName", "deck": "YourDeckName",
"knownWords": {
"decks": {
"YourDeckName": ["Word", "Word Reading", "Expression"]
}
},
"fields": { "fields": {
"sentence": "Sentence", "sentence": "Sentence",
"audio": "Audio", "audio": "Audio",
@@ -26,6 +31,8 @@ For most users, start with this minimal configuration:
} }
``` ```
`ankiConnect.deck` is still accepted for backward-compatible polling scope and legacy known-word fallback behavior. For known-word cache scope, prefer `ankiConnect.knownWords.decks` with deck-to-fields mapping.
Then customize as needed using the sections below. Then customize as needed using the sections below.
## Configuration File ## Configuration File
@@ -59,6 +66,7 @@ SubMiner watches the active config file (`config.jsonc` or `config.json`) while
Hot-reloadable fields: Hot-reloadable fields:
- `subtitleStyle` - `subtitleStyle`
- `subtitleSidebar`
- `keybindings` - `keybindings`
- `shortcuts` - `shortcuts`
- `secondarySub.defaultMode` - `secondarySub.defaultMode`
@@ -88,6 +96,7 @@ The configuration file includes several main sections:
**Subtitle Display** **Subtitle Display**
- [**Subtitle Style**](#subtitle-style) - Appearance customization - [**Subtitle Style**](#subtitle-style) - Appearance customization
- [**Subtitle Sidebar**](#subtitle-sidebar) - Parsed cue list sidebar modal
- [**Subtitle Position**](#subtitle-position) - Overlay vertical positioning - [**Subtitle Position**](#subtitle-position) - Overlay vertical positioning
- [**Secondary Subtitles**](#secondary-subtitles) - Dual subtitle track support - [**Secondary Subtitles**](#secondary-subtitles) - Dual subtitle track support
@@ -193,6 +202,8 @@ Defaults warm everything (`true` for all toggles, `lowPowerMode: false`). Settin
The overlay includes a built-in WebSocket server that broadcasts subtitle text to connected clients (such as texthooker-ui) for external processing. The overlay includes a built-in WebSocket server that broadcasts subtitle text to connected clients (such as texthooker-ui) for external processing.
For endpoint details, payload examples, and client patterns, see [WebSocket / Texthooker API & Integration](/websocket-texthooker-api).
By default, the server uses "auto" mode: it starts automatically unless [mpv_websocket](https://github.com/kuroahna/mpv_websocket) is detected at `~/.config/mpv/mpv_websocket`. If you have mpv_websocket installed, the built-in server is skipped to avoid conflicts. By default, the server uses "auto" mode: it starts automatically unless [mpv_websocket](https://github.com/kuroahna/mpv_websocket) is detected at `~/.config/mpv/mpv_websocket`. If you have mpv_websocket installed, the built-in server is skipped to avoid conflicts.
See `config.example.jsonc` for detailed configuration options. See `config.example.jsonc` for detailed configuration options.
@@ -337,6 +348,48 @@ Secondary subtitle defaults: `fontFamily: "Inter, Noto Sans, Helvetica Neue, san
**See `config.example.jsonc`** for the complete list of subtitle style configuration options. **See `config.example.jsonc`** for the complete list of subtitle style configuration options.
### Subtitle Sidebar
Configure the parsed-subtitle sidebar modal.
```json
{
"subtitleSidebar": {
"enabled": false,
"autoOpen": false,
"layout": "overlay",
"toggleKey": "Backslash",
"pauseVideoOnHover": false,
"autoScroll": true,
"fontFamily": "\"M PLUS 1\", \"Noto Sans CJK JP\", sans-serif",
"fontSize": 16
}
}
```
| Option | Values | Description |
| --------------------------- | ---------------- | -------------------------------------------------------------------------------- |
| `enabled` | boolean | Enable subtitle sidebar support (`false` by default) |
| `autoOpen` | boolean | Open sidebar automatically on overlay startup (`false` by default) |
| `layout` | string | `"overlay"` floats over mpv; `"embedded"` reserves right-side player space to mimic browser-like layout |
| `toggleKey` | string | `KeyboardEvent.code` used to open/close the sidebar (default: `"Backslash"`) |
| `pauseVideoOnHover` | boolean | Pause playback while hovering the sidebar cue list |
| `autoScroll` | boolean | Keep the active cue in view while playback advances |
| `maxWidth` | number | Maximum sidebar width in CSS pixels (default: `420`) |
| `opacity` | number | Sidebar opacity between `0` and `1` (default: `0.95`) |
| `backgroundColor` | string | Sidebar shell background color |
| `textColor` | hex color | Default cue text color |
| `fontFamily` | string | CSS `font-family` value applied to sidebar cue text |
| `fontSize` | number | Base sidebar cue font size in CSS pixels (default: `16`) |
| `timestampColor` | hex color | Cue timestamp color |
| `activeLineColor` | hex color | Active cue text color |
| `activeLineBackgroundColor` | string | Active cue background color |
| `hoverLineBackgroundColor` | string | Hovered cue background color |
The sidebar is only available when the active subtitle source has been parsed into a cue list. Default colors use Catppuccin Macchiato with a semi-transparent shell so the panel stays readable without feeling like an opaque settings dialog.
`embedded` layout is intended to act like a split-pane view: it reserves player space with a right-side video margin and keeps interaction in both the player area and sidebar. If you see unexpected offset behavior in your environment, switch back to `overlay` to isolate sidebar placement.
`jlptColors` keys are: `jlptColors` keys are:
| Key | Default | Description | | Key | Default | Description |
@@ -707,6 +760,8 @@ Anki and YouTube subtitle cleanup both read this provider, then apply feature-lo
| `apiKey` | string | Static API key for the shared provider | | `apiKey` | string | Static API key for the shared provider |
| `apiKeyCommand` | string | Shell command used to resolve the API key | | `apiKeyCommand` | string | Shell command used to resolve the API key |
| `baseUrl` | string (URL) | OpenAI-compatible base URL | | `baseUrl` | string (URL) | OpenAI-compatible base URL |
| `model` | string | Optional model override for shared provider workflows |
| `systemPrompt` | string | Optional system prompt override for shared provider workflows |
| `requestTimeoutMs` | integer milliseconds | Shared request timeout (default: `15000`) | | `requestTimeoutMs` | integer milliseconds | Shared request timeout (default: `15000`) |
SubMiner uses the shared provider in two places: SubMiner uses the shared provider in two places:
@@ -796,8 +851,8 @@ This example is intentionally compact. The option table below documents availabl
| `proxy.port` | number | Bind port for local AnkiConnect proxy (default: `8766`) | | `proxy.port` | number | Bind port for local AnkiConnect proxy (default: `8766`) |
| `proxy.upstreamUrl` | string (URL) | Upstream AnkiConnect URL that proxy forwards to (default: `http://127.0.0.1:8765`) | | `proxy.upstreamUrl` | string (URL) | Upstream AnkiConnect URL that proxy forwards to (default: `http://127.0.0.1:8765`) |
| `tags` | array of strings | Tags automatically added to cards mined/updated by SubMiner (default: `['SubMiner']`; set `[]` to disable automatic tagging). | | `tags` | array of strings | Tags automatically added to cards mined/updated by SubMiner (default: `['SubMiner']`; set `[]` to disable automatic tagging). |
| `deck` | string | Anki deck to monitor for new cards | | `ankiConnect.deck` | string | Legacy Anki polling/compatibility scope. Newer known-word cache scoping should use `ankiConnect.knownWords.decks`. |
| `ankiConnect.knownWords.decks` | array of strings | Decks used for known-word cache lookups. When omitted/empty, falls back to `ankiConnect.deck`. | | `ankiConnect.knownWords.decks` | object | Deck→fields mapping for known-word cache queries (for example `{ "Kaishi 1.5k": ["Word", "Word Reading"] }`). |
| `fields.word` | string | Card field for mined word / expression text (default: `Expression`) | | `fields.word` | string | Card field for mined word / expression text (default: `Expression`) |
| `fields.audio` | string | Card field for audio files (default: `ExpressionAudio`) | | `fields.audio` | string | Card field for audio files (default: `ExpressionAudio`) |
| `fields.image` | string | Card field for images (default: `Picture`) | | `fields.image` | string | Card field for images (default: `Picture`) |
@@ -818,6 +873,7 @@ This example is intentionally compact. The option table below documents availabl
| `media.animatedMaxWidth` | number (px) | Max width for animated AVIF (default: `640`) | | `media.animatedMaxWidth` | number (px) | Max width for animated AVIF (default: `640`) |
| `media.animatedMaxHeight` | number (px) | Optional max height for animated AVIF. Unset keeps source aspect-constrained height. | | `media.animatedMaxHeight` | number (px) | Optional max height for animated AVIF. Unset keeps source aspect-constrained height. |
| `media.animatedCrf` | number (0-63) | CRF quality for AVIF; lower = higher quality (default: `35`) | | `media.animatedCrf` | number (0-63) | CRF quality for AVIF; lower = higher quality (default: `35`) |
| `media.syncAnimatedImageToWordAudio` | `true`, `false` | Whether animated AVIF includes an opening frame synced to sentence word-audio timing (default: `true`). |
| `media.audioPadding` | number (seconds) | Padding around audio clip timing (default: `0.5`) | | `media.audioPadding` | number (seconds) | Padding around audio clip timing (default: `0.5`) |
| `media.fallbackDuration` | number (seconds) | Default duration if timing unavailable (default: `3.0`) | | `media.fallbackDuration` | number (seconds) | Default duration if timing unavailable (default: `3.0`) |
| `media.maxMediaDuration` | number (seconds) | Max duration for generated media from multi-line copy (default: `30`, `0` to disable) | | `media.maxMediaDuration` | number (seconds) | Max duration for generated media from multi-line copy (default: `30`, `0` to disable) |
@@ -826,10 +882,11 @@ This example is intentionally compact. The option table below documents availabl
| `behavior.mediaInsertMode` | `"append"`, `"prepend"` | Where to insert new media when overwrite is off (default: `"append"`) | | `behavior.mediaInsertMode` | `"append"`, `"prepend"` | Where to insert new media when overwrite is off (default: `"append"`) |
| `behavior.highlightWord` | `true`, `false` | Highlight the word in sentence context (default: `true`) | | `behavior.highlightWord` | `true`, `false` | Highlight the word in sentence context (default: `true`) |
| `ankiConnect.knownWords.highlightEnabled` | `true`, `false` | Enable fast local highlighting for words already known in Anki (default: `false`) | | `ankiConnect.knownWords.highlightEnabled` | `true`, `false` | Enable fast local highlighting for words already known in Anki (default: `false`) |
| `ankiConnect.knownWords.addMinedWordsImmediately` | `true`, `false` | Add words from successful mines into the local known-word cache immediately (default: `true`) |
| `ankiConnect.knownWords.color` | hex color string | Text color for tokens already found in the local known-word cache (default: `"#a6da95"`). | | `ankiConnect.knownWords.color` | hex color string | Text color for tokens already found in the local known-word cache (default: `"#a6da95"`). |
| `ankiConnect.knownWords.matchMode` | `"headword"`, `"surface"` | Matching strategy for known-word highlighting (default: `"headword"`). `headword` uses token headwords; `surface` uses visible subtitle text. | | `ankiConnect.knownWords.matchMode` | `"headword"`, `"surface"` | Matching strategy for known-word highlighting (default: `"headword"`). `headword` uses token headwords; `surface` uses visible subtitle text. |
| `ankiConnect.knownWords.refreshMinutes` | number | Minutes between known-word cache refreshes (default: `1440`) | | `ankiConnect.knownWords.refreshMinutes` | number | Minutes between known-word cache refreshes (default: `1440`) |
| `ankiConnect.knownWords.decks` | array of strings | Decks used by known-word cache refresh. Leave empty for compatibility with legacy `deck` scope. | | `ankiConnect.knownWords.decks` | object | Deck→fields mapping used for known-word cache query scope (e.g. `{ "Kaishi 1.5k": ["Word", "Word Reading"] }`). |
| `ankiConnect.nPlusOne.nPlusOne` | hex color string | Text color for the single target token to study when exactly one unknown candidate exists in a sentence (default: `"#c6a0f6"`). | | `ankiConnect.nPlusOne.nPlusOne` | hex color string | Text color for the single target token to study when exactly one unknown candidate exists in a sentence (default: `"#c6a0f6"`). |
| `ankiConnect.nPlusOne.minSentenceWords` | number | Minimum number of words required in a sentence before single unknown-word N+1 highlighting can trigger (default: `3`). | | `ankiConnect.nPlusOne.minSentenceWords` | number | Minimum number of words required in a sentence before single unknown-word N+1 highlighting can trigger (default: `3`). |
| `behavior.notificationType` | `"osd"`, `"system"`, `"both"`, `"none"` | Notification type on card update (default: `"osd"`) | | `behavior.notificationType` | `"osd"`, `"system"`, `"both"`, `"none"` | Notification type on card update (default: `"osd"`) |
@@ -875,7 +932,7 @@ Known-word cache policy:
- `ankiConnect.nPlusOne.nPlusOne` sets the color for the single target token when exactly one eligible unknown word exists. - `ankiConnect.nPlusOne.nPlusOne` sets the color for the single target token when exactly one eligible unknown word exists.
- `ankiConnect.nPlusOne.minSentenceWords` sets the minimum token count required in a sentence for N+1 highlighting (default: `3`). - `ankiConnect.nPlusOne.minSentenceWords` sets the minimum token count required in a sentence for N+1 highlighting (default: `3`).
- `ankiConnect.knownWords.color` sets the known-word highlight color for tokens already in Anki. - `ankiConnect.knownWords.color` sets the known-word highlight color for tokens already in Anki.
- `ankiConnect.knownWords.decks` accepts one or more decks. If empty, it uses the legacy single `ankiConnect.deck` value as scope. - `ankiConnect.knownWords.decks` accepts an object keyed by deck name. If omitted or empty, it falls back to the legacy `ankiConnect.deck` single-deck scope.
- Cache state is persisted to `known-words-cache.json` under the app `userData` directory. - Cache state is persisted to `known-words-cache.json` under the app `userData` directory.
- The cache is automatically invalidated when the configured scope changes (for example, when deck changes). - The cache is automatically invalidated when the configured scope changes (for example, when deck changes).
- Cache lookups are in-memory. By default, token headwords are matched against cached `Expression` / `Word` values; set `ankiConnect.knownWords.matchMode` to `"surface"` for raw subtitle text matching. - Cache lookups are in-memory. By default, token headwords are matched against cached `Expression` / `Word` values; set `ankiConnect.knownWords.matchMode` to `"surface"` for raw subtitle text matching.
@@ -1239,7 +1296,7 @@ Configure the local stats UI served from SubMiner and the in-app stats overlay t
{ {
"stats": { "stats": {
"toggleKey": "Backquote", "toggleKey": "Backquote",
"serverPort": 5175, "serverPort": 6969,
"autoStartServer": true, "autoStartServer": true,
"autoOpenBrowser": true "autoOpenBrowser": true
} }
@@ -1249,7 +1306,7 @@ Configure the local stats UI served from SubMiner and the in-app stats overlay t
| Option | Values | Description | | Option | Values | Description |
| ----------------- | ----------------- | --------------------------------------------------------------------------- | | ----------------- | ----------------- | --------------------------------------------------------------------------- |
| `toggleKey` | Electron key code | Overlay-local key code used to toggle the stats overlay. Default `Backquote`. | | `toggleKey` | Electron key code | Overlay-local key code used to toggle the stats overlay. Default `Backquote`. |
| `serverPort` | integer | Localhost port for the browser stats UI. Default `5175`. | | `serverPort` | integer | Localhost port for the browser stats UI. Default `6969`. |
| `autoStartServer` | `true`, `false` | Start the local stats HTTP server automatically once immersion tracking is active. Default `true`. | | `autoStartServer` | `true`, `false` | Start the local stats HTTP server automatically once immersion tracking is active. Default `true`. |
| `autoOpenBrowser` | `true`, `false` | When `subminer stats` starts the server on demand, also open the dashboard in your default browser. Default `true`. | | `autoOpenBrowser` | `true`, `false` | When `subminer stats` starts the server on demand, also open the dashboard in your default browser. Default `true`. |

View File

@@ -28,7 +28,7 @@ The same immersion data powers the stats dashboard.
- Launcher command: run `subminer stats` to start the local stats server on demand and open the dashboard in your browser. - Launcher command: run `subminer stats` to start the local stats server on demand and open the dashboard in your browser.
- Background server: run `subminer stats -b` to start or reuse a dedicated background stats daemon without keeping the launcher attached, and `subminer stats -s` to stop that daemon. - Background server: run `subminer stats -b` to start or reuse a dedicated background stats daemon without keeping the launcher attached, and `subminer stats -s` to stop that daemon.
- Maintenance command: run `subminer stats cleanup` or `subminer stats cleanup -v` to backfill/repair vocabulary metadata (`headword`, `reading`, POS) and purge stale or excluded rows from `imm_words` on demand. - Maintenance command: run `subminer stats cleanup` or `subminer stats cleanup -v` to backfill/repair vocabulary metadata (`headword`, `reading`, POS) and purge stale or excluded rows from `imm_words` on demand.
- Browser page: open `http://127.0.0.1:5175` directly if the local stats server is already running. - Browser page: open `http://127.0.0.1:6969` directly if the local stats server is already running.
### Dashboard Tabs ### Dashboard Tabs
@@ -42,6 +42,8 @@ Recent sessions, streak calendar, watch-time history, and a tracking snapshot wi
Cover-art library with search and sorting, per-series progress, episode drill-down, and direct links into mined cards. Cover-art library with search and sorting, per-series progress, episode drill-down, and direct links into mined cards.
When YouTube channel metadata is available, the Library tab groups videos by creator/channel and treats each tracked video as an episode-like entry inside that channel section.
![Stats Library](/screenshots/stats-library.png) ![Stats Library](/screenshots/stats-library.png)
#### Trends #### Trends
@@ -68,7 +70,7 @@ Stats server config lives under `stats`:
{ {
"stats": { "stats": {
"toggleKey": "Backquote", "toggleKey": "Backquote",
"serverPort": 5175, "serverPort": 6969,
"autoStartServer": true, "autoStartServer": true,
"autoOpenBrowser": true "autoOpenBrowser": true
} }

View File

@@ -6,11 +6,28 @@ This guide walks through the sentence mining loop — from watching a video to c
SubMiner runs as a transparent overlay on top of mpv. As subtitles play, the overlay displays them as interactive text. You hover a word, trigger Yomitan lookup with your configured lookup key/modifier, then create an Anki card with a single action. SubMiner automatically attaches the sentence, audio clip, and screenshot. SubMiner runs as a transparent overlay on top of mpv. As subtitles play, the overlay displays them as interactive text. You hover a word, trigger Yomitan lookup with your configured lookup key/modifier, then create an Anki card with a single action. SubMiner automatically attaches the sentence, audio clip, and screenshot.
```text ```mermaid
Watch video → See subtitle → Hover word + trigger lookup → Yomitan popup → Add to Anki flowchart LR
classDef step fill:#c6a0f6,stroke:#494d64,color:#24273a,stroke-width:1.5px
SubMiner auto-fills: classDef action fill:#8aadf4,stroke:#494d64,color:#24273a,stroke-width:1.5px
sentence, audio, image, translation classDef result fill:#a6da95,stroke:#494d64,color:#24273a,stroke-width:1.5px
classDef enrich fill:#8bd5ca,stroke:#494d64,color:#24273a,stroke-width:1.5px
Watch["Watch Video"]:::step
Sub["Subtitle Appears"]:::step
Hover["Hover Word"]:::action
Lookup["Trigger Lookup"]:::action
Yomi["Yomitan Popup"]:::result
Add["Add to Anki"]:::result
Watch --> Sub --> Hover --> Lookup --> Yomi --> Add
Add --> Enrich["SubMiner Enriches"]:::enrich
Enrich --> S["Sentence"]:::enrich
Enrich --> A["Audio Clip"]:::enrich
Enrich --> I["Screenshot"]:::enrich
Enrich --> T["Translation"]:::enrich
``` ```
## Subtitle Delivery Path (Startup + Runtime) ## Subtitle Delivery Path (Startup + Runtime)
@@ -176,6 +193,8 @@ SubMiner runs a local HTTP server at `http://127.0.0.1:5174` (configurable port)
The texthooker page displays the current subtitle and updates as new lines arrive. This is useful if you prefer to do lookups in a browser rather than through the overlay's built-in Yomitan. The texthooker page displays the current subtitle and updates as new lines arrive. This is useful if you prefer to do lookups in a browser rather than through the overlay's built-in Yomitan.
If you want to build your own browser client, websocket consumer, or automation relay, see [WebSocket / Texthooker API & Integration](/websocket-texthooker-api).
## Subtitle Sync (Subsync) ## Subtitle Sync (Subsync)
If your subtitle file is out of sync with the audio, SubMiner can resynchronize it using [alass](https://github.com/kaegi/alass) or [ffsubsync](https://github.com/smacke/ffsubsync). If your subtitle file is out of sync with the audio, SubMiner can resynchronize it using [alass](https://github.com/kaegi/alass) or [ffsubsync](https://github.com/smacke/ffsubsync).
@@ -206,7 +225,7 @@ Enable it in your config:
} }
``` ```
Open the dashboard in the overlay with `stats.toggleKey` (default: `` ` ``), launch it in a browser with `subminer stats`, keep a dedicated background server alive with `subminer stats -b`, stop that background server with `subminer stats -s`, or visit `http://127.0.0.1:5175` directly if the local stats server is already running. The dashboard covers overview totals, anime progress, session detail, and vocabulary drill-down from the same local immersion database. Open the dashboard in the overlay with `stats.toggleKey` (default: `` ` ``), launch it in a browser with `subminer stats`, keep a dedicated background server alive with `subminer stats -b`, stop that background server with `subminer stats -s`, or visit `http://127.0.0.1:6969` directly if the local stats server is already running. The dashboard covers overview totals, anime progress, session detail, and vocabulary drill-down from the same local immersion database.
See [Immersion Tracking](/immersion-tracking) for dashboard details, schema, and retention settings. See [Immersion Tracking](/immersion-tracking) for dashboard details, schema, and retention settings.

View File

@@ -284,6 +284,30 @@
} // Secondary setting. } // Secondary setting.
}, // Primary and secondary subtitle styling. }, // Primary and secondary subtitle styling.
// ==========================================
// Subtitle Sidebar
// Parsed-subtitle sidebar cue list styling, behavior, and toggle key.
// Hot-reload: subtitle sidebar changes apply live without restarting SubMiner.
// ==========================================
"subtitleSidebar": {
"enabled": false, // Enable the subtitle sidebar feature for parsed subtitle sources. Values: true | false
"autoOpen": false, // Automatically open the subtitle sidebar once during overlay startup. Values: true | false
"layout": "overlay", // Render the subtitle sidebar as a floating overlay or reserve space inside mpv. Values: overlay | embedded
"toggleKey": "Backslash", // KeyboardEvent.code used to toggle the subtitle sidebar open and closed.
"pauseVideoOnHover": false, // Pause mpv while hovering the subtitle sidebar, then resume on leave. Values: true | false
"autoScroll": true, // Auto-scroll the active subtitle cue into view while playback advances. Values: true | false
"maxWidth": 420, // Maximum sidebar width in CSS pixels.
"opacity": 0.95, // Base opacity applied to the sidebar shell.
"backgroundColor": "rgba(73, 77, 100, 0.9)", // Background color for the subtitle sidebar shell.
"textColor": "#cad3f5", // Default cue text color in the subtitle sidebar.
"fontFamily": "\"M PLUS 1\", \"Noto Sans CJK JP\", sans-serif", // Font family used for subtitle sidebar cue text.
"fontSize": 16, // Base font size for subtitle sidebar cue text in CSS pixels.
"timestampColor": "#a5adcb", // Timestamp color in the subtitle sidebar.
"activeLineColor": "#f5bde6", // Text color for the active subtitle cue.
"activeLineBackgroundColor": "rgba(138, 173, 244, 0.22)", // Background color for the active subtitle cue.
"hoverLineBackgroundColor": "rgba(54, 58, 79, 0.84)" // Background color for hovered subtitle cues.
}, // Parsed-subtitle sidebar cue list styling, behavior, and toggle key.
// ========================================== // ==========================================
// Shared AI Provider // Shared AI Provider
// Canonical OpenAI-compatible provider transport settings shared by Anki and YouTube subtitle fixing. // Canonical OpenAI-compatible provider transport settings shared by Anki and YouTube subtitle fixing.

Binary file not shown.

Before

Width:  |  Height:  |  Size: 52 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 614 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 1.4 MiB

After

Width:  |  Height:  |  Size: 1.4 MiB

View File

@@ -68,10 +68,13 @@ Mouse-hover playback behavior is configured separately from shortcuts: `subtitle
| `Ctrl/Cmd+Shift+O` | Open runtime options palette | `shortcuts.openRuntimeOptions` | | `Ctrl/Cmd+Shift+O` | Open runtime options palette | `shortcuts.openRuntimeOptions` |
| `Ctrl+Shift+J` | Open Jimaku subtitle search modal | `shortcuts.openJimaku` | | `Ctrl+Shift+J` | Open Jimaku subtitle search modal | `shortcuts.openJimaku` |
| `Ctrl+Alt+S` | Open subtitle sync (subsync) modal | `shortcuts.triggerSubsync` | | `Ctrl+Alt+S` | Open subtitle sync (subsync) modal | `shortcuts.triggerSubsync` |
| `\` | Toggle subtitle sidebar | `subtitleSidebar.toggleKey` |
| `` ` `` | Toggle stats overlay | `stats.toggleKey` | | `` ` `` | Toggle stats overlay | `stats.toggleKey` |
The stats toggle is handled inside the focused visible overlay window. It is configurable through the top-level `stats.toggleKey` setting and defaults to `Backquote`. The stats toggle is handled inside the focused visible overlay window. It is configurable through the top-level `stats.toggleKey` setting and defaults to `Backquote`.
The subtitle sidebar toggle is overlay-local and only opens when SubMiner has a parsed cue list for the active subtitle source.
## Controller Shortcuts ## Controller Shortcuts
These overlay-local shortcuts are fixed and open controller utilities for the Chrome Gamepad API integration. These overlay-local shortcuts are fixed and open controller utilities for the Chrome Gamepad API integration.
@@ -133,4 +136,4 @@ The `keybindings` array overrides or extends the overlay's built-in key handling
} }
``` ```
Both `shortcuts` and `keybindings` are [hot-reloadable](/configuration#hot-reload-behavior) — changes take effect without restarting SubMiner. All of `shortcuts`, `keybindings`, and `subtitleSidebar` are [hot-reloadable](/configuration#hot-reload-behavior) — changes take effect without restarting SubMiner.

View File

@@ -24,7 +24,7 @@ N+1 highlighting identifies sentences where you know every word except one, maki
| --- | --- | --- | | --- | --- | --- |
| `ankiConnect.knownWords.highlightEnabled` | `false` | Enable known-word cache lookups used by N+1 highlighting | | `ankiConnect.knownWords.highlightEnabled` | `false` | Enable known-word cache lookups used by N+1 highlighting |
| `ankiConnect.knownWords.refreshMinutes` | `1440` | Minutes between Anki cache refreshes | | `ankiConnect.knownWords.refreshMinutes` | `1440` | Minutes between Anki cache refreshes |
| `ankiConnect.knownWords.decks` | `[]` | Decks to query (falls back to `ankiConnect.deck`) | | `ankiConnect.knownWords.decks` | `{}` | Deck→fields map for known-word cache queries (legacy fallback: `ankiConnect.deck`) |
| `ankiConnect.knownWords.matchMode` | `"headword"` | `"headword"` (dictionary form) or `"surface"` (raw text) | | `ankiConnect.knownWords.matchMode` | `"headword"` | `"headword"` (dictionary form) or `"surface"` (raw text) |
| `ankiConnect.nPlusOne.minSentenceWords` | `3` | Minimum tokens in a sentence for N+1 to trigger | | `ankiConnect.nPlusOne.minSentenceWords` | `3` | Minimum tokens in a sentence for N+1 to trigger |
| `ankiConnect.nPlusOne.nPlusOne` | `#c6a0f6` | Color for the single unknown target word | | `ankiConnect.nPlusOne.nPlusOne` | `#c6a0f6` | Color for the single unknown target word |

View File

@@ -0,0 +1,357 @@
# WebSocket / Texthooker API & Integration
SubMiner exposes a small set of local integration surfaces for browser tools, automation helpers, and mpv-driven workflows:
- **Subtitle WebSocket** at `ws://127.0.0.1:6677` by default for plain subtitle pushes.
- **Annotation WebSocket** at `ws://127.0.0.1:6678` by default for token-aware clients.
- **Texthooker HTTP UI** at `http://127.0.0.1:5174` by default for browser-based subtitle consumption.
- **mpv plugin script messages** for in-player automation and extension.
This page documents those integration points and shows how to build custom consumers around them.
## Quick Reference
| Surface | Default | Purpose |
| --- | --- | --- |
| `websocket` | `ws://127.0.0.1:6677` | Basic subtitle broadcast stream |
| `annotationWebsocket` | `ws://127.0.0.1:6678` | Structured stream with token metadata |
| `texthooker` | `http://127.0.0.1:5174` | Local texthooker UI with injected websocket config |
| mpv plugin | `script-message subminer-*` | Start/stop/toggle/status automation inside mpv |
## Enable and Configure the Services
SubMiner's integration ports are configured in `config.jsonc`.
```jsonc
{
"websocket": {
"enabled": "auto",
"port": 6677
},
"annotationWebsocket": {
"enabled": true,
"port": 6678
},
"texthooker": {
"launchAtStartup": true,
"openBrowser": true
}
}
```
### How startup behaves
- `websocket.enabled: "auto"` starts the basic subtitle websocket unless SubMiner detects the external `mpv_websocket` plugin.
- `annotationWebsocket` is independent from `websocket` and stays enabled unless you explicitly disable it.
- `texthooker.launchAtStartup` starts the local HTTP UI automatically.
- `texthooker.openBrowser` controls whether SubMiner opens the texthooker page in your browser when it starts.
If you use the [mpv plugin](/mpv-plugin), it can also start a texthooker-only helper process and override the texthooker port in `subminer.conf`.
## Developer API Documentation
### 1. Subtitle WebSocket
Use the basic subtitle websocket when you only need the current subtitle line and a ready-to-render HTML sentence string.
- **Default URL:** `ws://127.0.0.1:6677`
- **Transport:** local WebSocket server bound to `127.0.0.1`
- **Direction:** server push only
- **Client auth:** none
- **Reconnects:** client-managed
When a client connects, SubMiner immediately sends the latest subtitle payload if one is available. After that, it pushes a new message each time the current subtitle changes.
#### Message shape
```json
{
"version": 1,
"text": "無事",
"sentence": "<span class=\"word word-known word-jlpt-n2\" data-reading=\"ぶじ\" data-headword=\"無事\" data-frequency-rank=\"745\" data-jlpt-level=\"N2\">無事</span>",
"tokens": [
{
"surface": "無事",
"reading": "ぶじ",
"headword": "無事",
"startPos": 0,
"endPos": 2,
"partOfSpeech": "other",
"isMerged": false,
"isKnown": true,
"isNPlusOneTarget": false,
"isNameMatch": false,
"jlptLevel": "N2",
"frequencyRank": 745,
"className": "word word-known word-jlpt-n2",
"frequencyRankLabel": "745",
"jlptLevelLabel": "N2"
}
]
}
```
#### Field reference
| Field | Type | Notes |
| --- | --- | --- |
| `version` | number | Current websocket payload version. Today this is `1`. |
| `text` | string | Raw subtitle text. |
| `sentence` | string | HTML string with `<span>` wrappers and `data-*` attributes for client rendering. |
| `tokens` | array | Token metadata; empty when the subtitle is not tokenized yet. |
Each token may include:
| Token field | Type | Notes |
| --- | --- | --- |
| `surface` | string | Display text for the token |
| `reading` | string | Kana reading when available |
| `headword` | string | Dictionary headword when available |
| `startPos` / `endPos` | number | Character offsets in the subtitle text |
| `partOfSpeech` | string | SubMiner token POS label |
| `isMerged` | boolean | Whether this token represents merged content |
| `isKnown` | boolean | Marked known by SubMiner's known-word logic |
| `isNPlusOneTarget` | boolean | True when the token is the sentence's N+1 target |
| `isNameMatch` | boolean | True for prioritized character-name matches |
| `frequencyRank` | number | Frequency rank when available |
| `jlptLevel` | string | JLPT level when available |
| `className` | string | CSS-ready class list derived from token state |
| `frequencyRankLabel` | string or `null` | Preformatted rank label for UIs |
| `jlptLevelLabel` | string or `null` | Preformatted JLPT label for UIs |
### 2. Annotation WebSocket
Use the annotation websocket for custom clients that want the same structured token payload the bundled texthooker UI consumes.
- **Default URL:** `ws://127.0.0.1:6678`
- **Payload shape:** same JSON contract as the basic subtitle websocket
- **Primary difference:** this stream is intended to stay on even when the basic websocket auto-disables because `mpv_websocket` is installed
In practice, if you are building a new client, prefer `annotationWebsocket` unless you specifically need compatibility with an existing `websocket` consumer.
### 3. HTML markup conventions
The `sentence` field is pre-rendered HTML generated by SubMiner. Depending on token state, it can include classes such as:
- `word`
- `word-known`
- `word-n-plus-one`
- `word-name-match`
- `word-jlpt-n1` through `word-jlpt-n5`
- `word-frequency-single`
- `word-frequency-band-1` through `word-frequency-band-5`
SubMiner also adds tooltip-friendly data attributes when available:
- `data-reading`
- `data-headword`
- `data-frequency-rank`
- `data-jlpt-level`
If you need a fully custom UI, ignore `sentence` and render from `tokens` instead.
## Texthooker Integration Guide
### When to use the bundled texthooker page
Use texthooker when you want a browser tab that:
- updates live from current subtitles
- works well with browser-based Yomitan setups
- inherits SubMiner's coloring preferences and websocket URL automatically
Start it with either:
```bash
subminer texthooker
```
or by leaving `texthooker.launchAtStartup` enabled.
### What SubMiner injects into the page
When SubMiner serves the local texthooker UI, it injects bootstrap values into `window.localStorage`, including:
- `bannou-texthooker-websocketUrl`
- coloring toggles for known/N+1/name/frequency/JLPT styling
- CSS custom properties for SubMiner's token colors
That means the bundled page already knows which websocket to connect to and which color palette to use.
### Build a custom websocket client
Here is a minimal browser client for the annotation stream:
```html
<!doctype html>
<meta charset="utf-8" />
<title>SubMiner client</title>
<div id="subtitle">Waiting for subtitles...</div>
<script>
const subtitle = document.getElementById('subtitle');
const ws = new WebSocket('ws://127.0.0.1:6678');
ws.addEventListener('message', (event) => {
const payload = JSON.parse(event.data);
subtitle.innerHTML = payload.sentence || payload.text;
});
ws.addEventListener('close', () => {
subtitle.textContent = 'Connection closed; reload or reconnect.';
});
</script>
```
### Build a custom Node client
```js
import WebSocket from 'ws';
const ws = new WebSocket('ws://127.0.0.1:6678');
ws.on('message', (raw) => {
const payload = JSON.parse(String(raw));
console.log({
text: payload.text,
tokens: payload.tokens.length,
firstToken: payload.tokens[0]?.surface ?? null,
});
});
```
### Integration tips
- Bind only to `127.0.0.1`; these services are local-only by design.
- Handle empty `tokens` arrays gracefully because subtitle text can arrive before tokenization completes.
- Reconnect on disconnect; SubMiner does not manage client reconnects for you.
- Prefer `payload.text` for logging/automation and `payload.sentence` or `payload.tokens` for UI rendering.
## Plugin Development
SubMiner does **not** currently expose a general-purpose third-party plugin SDK inside the app itself. Today, the supported extension surfaces are:
1. the local websocket streams
2. the local texthooker UI
3. the mpv Lua plugin's script-message API
4. the launcher CLI
### mpv script messages
The mpv plugin accepts these script messages:
```text
script-message subminer-start
script-message subminer-stop
script-message subminer-toggle
script-message subminer-menu
script-message subminer-options
script-message subminer-restart
script-message subminer-status
script-message subminer-autoplay-ready
script-message subminer-aniskip-refresh
script-message subminer-skip-intro
```
The start command also accepts inline overrides:
```text
script-message subminer-start backend=hyprland socket=/custom/path texthooker=no log-level=debug
```
### Practical extension patterns
#### Add another mpv script that coordinates with SubMiner
Examples:
- send `subminer-start` after your own media-selection script chooses a file
- send `subminer-status` before running follow-up automation
- send `subminer-aniskip-refresh` after you update title/episode metadata
#### Build a launcher wrapper
Examples:
- open a media picker, then call `subminer /path/to/file.mkv`
- launch browser-only subtitle tooling with `subminer texthooker`
- disable the helper UI for a session with `subminer --no-texthooker video.mkv`
#### Build an overlay-adjacent client
Examples:
- browser widget showing current subtitle + token breakdown
- local vocabulary capture helper that writes interesting lines to a file
- bridge service that forwards websocket events into your own workflow engine
## Webhook Examples
SubMiner does **not** currently send outbound webhooks by itself. The supported pattern is to consume the websocket locally and relay events into another system.
That still makes webhook-style automation straightforward.
### Example: forward subtitle lines to a local webhook receiver
```js
import WebSocket from 'ws';
const ws = new WebSocket('ws://127.0.0.1:6678');
ws.on('message', async (raw) => {
const payload = JSON.parse(String(raw));
await fetch('http://127.0.0.1:5678/subminer/subtitle', {
method: 'POST',
headers: { 'content-type': 'application/json' },
body: JSON.stringify({
text: payload.text,
tokens: payload.tokens,
receivedAt: new Date().toISOString(),
}),
});
});
```
### Automation ideas
- **n8n / Make / Zapier relay:** send each subtitle line into an automation workflow for logging, translation, or summarization.
- **Discord / Slack notifier:** post only lines that contain unknown words or N+1 targets.
- **Obsidian / Markdown capture:** append subtitle lines plus token metadata to a daily immersion note.
- **Local LLM pipeline:** trigger a glossary, translation, or sentence-mining workflow whenever a new line arrives.
### Filtering example: only forward N+1 lines
```js
import WebSocket from 'ws';
const ws = new WebSocket('ws://127.0.0.1:6678');
ws.on('message', async (raw) => {
const payload = JSON.parse(String(raw));
const hasNPlusOne = payload.tokens.some((token) => token.isNPlusOneTarget);
if (!hasNPlusOne) return;
await fetch('http://127.0.0.1:5678/subminer/n-plus-one', {
method: 'POST',
headers: { 'content-type': 'application/json' },
body: JSON.stringify({ text: payload.text, tokens: payload.tokens }),
});
});
```
## Recommended Integration Combinations
- **Browser Yomitan client:** `texthooker` + `annotationWebsocket`
- **Custom dashboard:** `annotationWebsocket` only
- **Lightweight subtitle mirror:** `websocket` only
- **mpv-side automation:** mpv plugin script messages + optional websocket relay
- **Webhook-style workflows:** `annotationWebsocket` + your own local relay service
## Related Pages
- [Configuration](/configuration#websocket-server)
- [Mining Workflow — Texthooker](/mining-workflow#texthooker)
- [MPV Plugin](/mpv-plugin)
- [Launcher Script](/launcher-script)
- [Anki Integration](/anki-integration#proxy-mode-setup-yomitan--texthooker)

View File

@@ -8,6 +8,7 @@
4. Bump `package.json` to the release version. 4. Bump `package.json` to the release version.
5. Build release metadata before tagging: 5. Build release metadata before tagging:
`bun run changelog:build --version <version> --date <yyyy-mm-dd>` `bun run changelog:build --version <version> --date <yyyy-mm-dd>`
- Release CI now also auto-runs this step when releasing directly from a tag and `changes/*.md` fragments remain.
6. Review `CHANGELOG.md` and `release/release-notes.md`. 6. Review `CHANGELOG.md` and `release/release-notes.md`.
7. Run release gate locally: 7. Run release gate locally:
`bun run changelog:check --version <version>` `bun run changelog:check --version <version>`
@@ -29,6 +30,8 @@ Notes:
- Pass `--date` explicitly when you want the release stamped with the local cut date; otherwise the generator uses the current ISO date, which can roll over to the next UTC day late at night. - Pass `--date` explicitly when you want the release stamped with the local cut date; otherwise the generator uses the current ISO date, which can roll over to the next UTC day late at night.
- `changelog:check` now rejects tag/package version mismatches. - `changelog:check` now rejects tag/package version mismatches.
- `changelog:build` generates `CHANGELOG.md` + `release/release-notes.md` and removes the released `changes/*.md` fragments. - `changelog:build` generates `CHANGELOG.md` + `release/release-notes.md` and removes the released `changes/*.md` fragments.
- Accordingly, the release workflow auto-runs `changelog:build` when it detects unreleased `changes/*.md` fragments on a tag-based run, then verifies and publishes.
- Do not tag while `changes/*.md` fragments still exist. - Do not tag while `changes/*.md` fragments still exist.
- If you need to repair a published release body (for example, a prior versions section was omitted), regenerate notes from `CHANGELOG.md` and re-edit the release with `gh release edit --notes-file`.
- Tagged release workflow now also attempts to update `subminer-bin` on the AUR after GitHub Release publication. - Tagged release workflow now also attempts to update `subminer-bin` on the AUR after GitHub Release publication.
- Required GitHub Actions secret: `AUR_SSH_PRIVATE_KEY`. Add the matching public key to your AUR account before relying on the automation. - Required GitHub Actions secret: `AUR_SSH_PRIVATE_KEY`. Add the matching public key to your AUR account before relying on the automation.

View File

@@ -22,6 +22,7 @@ Read when: you need to find the owner module for a behavior or test surface
- Subtitle/token pipeline: `src/core/services/tokenizer*`, `src/subtitle/`, `src/tokenizers/` - Subtitle/token pipeline: `src/core/services/tokenizer*`, `src/subtitle/`, `src/tokenizers/`
- Anki workflow: `src/anki-integration/`, `src/core/services/anki-jimaku*.ts` - Anki workflow: `src/anki-integration/`, `src/core/services/anki-jimaku*.ts`
- Immersion tracking: `src/core/services/immersion-tracker/` - Immersion tracking: `src/core/services/immersion-tracker/`
Includes stats storage/query schema such as `imm_videos`, `imm_media_art`, and `imm_youtube_videos` for per-video and YouTube-specific library metadata.
- AniList tracking: `src/core/services/anilist/`, `src/main/runtime/composers/anilist-*` - AniList tracking: `src/core/services/anilist/`, `src/main/runtime/composers/anilist-*`
- Jellyfin integration: `src/core/services/jellyfin*.ts`, `src/main/runtime/composers/jellyfin-*` - Jellyfin integration: `src/core/services/jellyfin*.ts`, `src/main/runtime/composers/jellyfin-*`
- Window trackers: `src/window-trackers/` - Window trackers: `src/window-trackers/`

View File

@@ -553,10 +553,12 @@ export function buildSubminerScriptOpts(
socketPath: string, socketPath: string,
aniSkipMetadata: AniSkipMetadata | null, aniSkipMetadata: AniSkipMetadata | null,
logLevel: LogLevel = 'info', logLevel: LogLevel = 'info',
extraParts: string[] = [],
): string { ): string {
const parts = [ const parts = [
`subminer-binary_path=${sanitizeScriptOptValue(appPath)}`, `subminer-binary_path=${sanitizeScriptOptValue(appPath)}`,
`subminer-socket_path=${sanitizeScriptOptValue(socketPath)}`, `subminer-socket_path=${sanitizeScriptOptValue(socketPath)}`,
...extraParts.map(sanitizeScriptOptValue),
]; ];
if (logLevel !== 'info') { if (logLevel !== 'info') {
parts.push(`subminer-log_level=${sanitizeScriptOptValue(logLevel)}`); parts.push(`subminer-log_level=${sanitizeScriptOptValue(logLevel)}`);

View File

@@ -149,20 +149,16 @@ test('doctor command forwards refresh-known-words to app binary', () => {
context.args.doctorRefreshKnownWords = true; context.args.doctorRefreshKnownWords = true;
const forwarded: string[][] = []; const forwarded: string[][] = [];
assert.throws( const handled = runDoctorCommand(context, {
() =>
runDoctorCommand(context, {
commandExists: () => false, commandExists: () => false,
configExists: () => true, configExists: () => true,
resolveMainConfigPath: () => '/tmp/SubMiner/config.jsonc', resolveMainConfigPath: () => '/tmp/SubMiner/config.jsonc',
runAppCommandWithInherit: (_appPath, appArgs) => { runAppCommandWithInherit: (_appPath, appArgs) => {
forwarded.push(appArgs); forwarded.push(appArgs);
throw new ExitSignal(0);
}, },
}), });
(error: unknown) => error instanceof ExitSignal && error.code === 0,
);
assert.equal(handled, true);
assert.deepEqual(forwarded, [['--refresh-known-words']]); assert.deepEqual(forwarded, [['--refresh-known-words']]);
}); });
@@ -187,31 +183,25 @@ test('dictionary command forwards --dictionary and target path to app binary', (
context.args.dictionaryTarget = '/tmp/anime'; context.args.dictionaryTarget = '/tmp/anime';
const forwarded: string[][] = []; const forwarded: string[][] = [];
assert.throws( const handled = runDictionaryCommand(context, {
() =>
runDictionaryCommand(context, {
runAppCommandWithInherit: (_appPath, appArgs) => { runAppCommandWithInherit: (_appPath, appArgs) => {
forwarded.push(appArgs); forwarded.push(appArgs);
throw new ExitSignal(0);
}, },
}), });
(error: unknown) => error instanceof ExitSignal && error.code === 0,
);
assert.equal(handled, true);
assert.deepEqual(forwarded, [['--dictionary', '--dictionary-target', '/tmp/anime']]); assert.deepEqual(forwarded, [['--dictionary', '--dictionary-target', '/tmp/anime']]);
}); });
test('dictionary command throws if app handoff unexpectedly returns', () => { test('dictionary command returns after app handoff starts', () => {
const context = createContext(); const context = createContext();
context.args.dictionary = true; context.args.dictionary = true;
assert.throws( const handled = runDictionaryCommand(context, {
() => runAppCommandWithInherit: () => undefined,
runDictionaryCommand(context, { });
runAppCommandWithInherit: () => undefined as never,
}), assert.equal(handled, true);
/unexpectedly returned/,
);
}); });
test('stats command launches attached app command with response path', async () => { test('stats command launches attached app command with response path', async () => {

View File

@@ -2,7 +2,7 @@ import { runAppCommandWithInherit } from '../mpv.js';
import type { LauncherCommandContext } from './context.js'; import type { LauncherCommandContext } from './context.js';
interface DictionaryCommandDeps { interface DictionaryCommandDeps {
runAppCommandWithInherit: (appPath: string, appArgs: string[]) => never; runAppCommandWithInherit: (appPath: string, appArgs: string[]) => void;
} }
const defaultDeps: DictionaryCommandDeps = { const defaultDeps: DictionaryCommandDeps = {
@@ -27,5 +27,5 @@ export function runDictionaryCommand(
} }
deps.runAppCommandWithInherit(appPath, forwarded); deps.runAppCommandWithInherit(appPath, forwarded);
throw new Error('Dictionary command app handoff unexpectedly returned.'); return true;
} }

View File

@@ -9,7 +9,7 @@ interface DoctorCommandDeps {
commandExists(command: string): boolean; commandExists(command: string): boolean;
configExists(path: string): boolean; configExists(path: string): boolean;
resolveMainConfigPath(): string; resolveMainConfigPath(): string;
runAppCommandWithInherit(appPath: string, appArgs: string[]): never; runAppCommandWithInherit(appPath: string, appArgs: string[]): void;
} }
const defaultDeps: DoctorCommandDeps = { const defaultDeps: DoctorCommandDeps = {
@@ -85,6 +85,7 @@ export function runDoctorCommand(
return true; return true;
} }
deps.runAppCommandWithInherit(appPath, ['--refresh-known-words']); deps.runAppCommandWithInherit(appPath, ['--refresh-known-words']);
return true;
} }
const hasHardFailure = checks.some((entry) => const hasHardFailure = checks.some((entry) =>

View File

@@ -21,6 +21,7 @@ export async function runJellyfinCommand(context: LauncherCommandContext): Promi
if (args.logLevel !== 'info') forwarded.push('--log-level', args.logLevel); if (args.logLevel !== 'info') forwarded.push('--log-level', args.logLevel);
appendPasswordStore(forwarded); appendPasswordStore(forwarded);
runAppCommandWithInherit(appPath, forwarded); runAppCommandWithInherit(appPath, forwarded);
return true;
} }
if (args.jellyfinLogin) { if (args.jellyfinLogin) {
@@ -44,6 +45,7 @@ export async function runJellyfinCommand(context: LauncherCommandContext): Promi
if (args.logLevel !== 'info') forwarded.push('--log-level', args.logLevel); if (args.logLevel !== 'info') forwarded.push('--log-level', args.logLevel);
appendPasswordStore(forwarded); appendPasswordStore(forwarded);
runAppCommandWithInherit(appPath, forwarded); runAppCommandWithInherit(appPath, forwarded);
return true;
} }
if (args.jellyfinLogout) { if (args.jellyfinLogout) {
@@ -51,6 +53,7 @@ export async function runJellyfinCommand(context: LauncherCommandContext): Promi
if (args.logLevel !== 'info') forwarded.push('--log-level', args.logLevel); if (args.logLevel !== 'info') forwarded.push('--log-level', args.logLevel);
appendPasswordStore(forwarded); appendPasswordStore(forwarded);
runAppCommandWithInherit(appPath, forwarded); runAppCommandWithInherit(appPath, forwarded);
return true;
} }
if (args.jellyfinPlay) { if (args.jellyfinPlay) {
@@ -69,13 +72,8 @@ export async function runJellyfinCommand(context: LauncherCommandContext): Promi
if (args.logLevel !== 'info') forwarded.push('--log-level', args.logLevel); if (args.logLevel !== 'info') forwarded.push('--log-level', args.logLevel);
appendPasswordStore(forwarded); appendPasswordStore(forwarded);
runAppCommandWithInherit(appPath, forwarded); runAppCommandWithInherit(appPath, forwarded);
return true;
} }
return Boolean( return false;
args.jellyfin ||
args.jellyfinLogin ||
args.jellyfinLogout ||
args.jellyfinPlay ||
args.jellyfinDiscovery,
);
} }

View File

@@ -0,0 +1,113 @@
import test from 'node:test';
import assert from 'node:assert/strict';
import type { LauncherCommandContext } from './context.js';
import { runPlaybackCommandWithDeps } from './playback-command.js';
/**
 * Builds a fully-populated LauncherCommandContext fixture for playback-command
 * tests. The target is preset to a YouTube URL (`targetKind: 'url'`) so tests
 * can exercise the YouTube playback path without touching real media.
 *
 * All toggles default to off/empty; the processAdapter stub throws on `exit`
 * so an unexpected process exit fails the test loudly.
 */
function createContext(): LauncherCommandContext {
  return {
    args: {
      // Backend/selection defaults: auto-detect backend, current directory,
      // no recursion, no profile, overlay not requested up front.
      backend: 'auto',
      directory: '.',
      recursive: false,
      profile: '',
      startOverlay: false,
      // YouTube handling: download mode, no whisper/subgen/AI overrides.
      youtubeMode: 'download',
      whisperBin: '',
      whisperModel: '',
      whisperVadModel: '',
      whisperThreads: 0,
      youtubeSubgenOutDir: '',
      youtubeSubgenAudioFormat: '',
      youtubeSubgenKeepTemp: false,
      youtubeFixWithAi: false,
      youtubePrimarySubLangs: [],
      youtubeSecondarySubLangs: [],
      youtubeAudioLangs: [],
      youtubeWhisperSourceLanguage: '',
      aiConfig: {},
      // Helper UI flags all disabled.
      useTexthooker: false,
      autoStartOverlay: false,
      texthookerOnly: false,
      useRofi: false,
      logLevel: 'info',
      passwordStore: '',
      // The fixture's playback target: a concrete YouTube URL.
      target: 'https://www.youtube.com/watch?v=65Ovd7t8sNw',
      targetKind: 'url',
      // Jimaku subtitle-search defaults.
      jimakuApiKey: '',
      jimakuApiKeyCommand: '',
      jimakuApiBaseUrl: '',
      jimakuLanguagePreference: 'ja',
      jimakuMaxEntryResults: 20,
      // Jellyfin subcommands all disabled.
      jellyfin: false,
      jellyfinLogin: false,
      jellyfinLogout: false,
      jellyfinPlay: false,
      jellyfinDiscovery: false,
      // Utility subcommands all disabled.
      dictionary: false,
      stats: false,
      doctor: false,
      doctorRefreshKnownWords: false,
      configPath: false,
      configShow: false,
      mpvIdle: false,
      mpvSocket: false,
      mpvStatus: false,
      mpvArgs: '',
      appPassthrough: false,
      appArgs: [],
      jellyfinServer: '',
      jellyfinUsername: '',
      jellyfinPassword: '',
    },
    scriptPath: '/tmp/subminer',
    scriptName: 'subminer',
    mpvSocketPath: '/tmp/subminer.sock',
    // Plugin runtime defaults mirror the auto-start overlay flow.
    pluginRuntimeConfig: {
      socketPath: '/tmp/subminer.sock',
      autoStart: true,
      autoStartVisibleOverlay: true,
      autoStartPauseUntilReady: true,
    },
    appPath: '/tmp/SubMiner.AppImage',
    launcherJellyfinConfig: {},
    // Process adapter stub: no-op signal/stdout/exit-code handling; `exit`
    // throws so any code path that tries to terminate the process is caught.
    processAdapter: {
      platform: () => 'linux',
      onSignal: () => {},
      writeStdout: () => {},
      exit: (_code: number): never => {
        throw new Error('unexpected exit');
      },
      setExitCode: () => {},
    },
  };
}
// Confirms the YouTube flow wires --youtube-play/--youtube-mode into the
// primary overlay start instead of spawning a second detached app process.
test('youtube playback launches overlay with youtube-play args in the primary app start', async () => {
  const context = createContext();
  const invocations: string[] = [];
  const record = (entry: string) => {
    invocations.push(entry);
  };

  await runPlaybackCommandWithDeps(context, {
    ensurePlaybackSetupReady: async () => {},
    // Resolve the picker immediately to the fixture's YouTube URL.
    chooseTarget: async () => ({ target: context.args.target, kind: 'url' }),
    checkDependencies: () => {},
    registerCleanup: () => {},
    startMpv: async () => {
      record('startMpv');
    },
    waitForUnixSocketReady: async () => true,
    startOverlay: async (_appPath, _args, _socketPath, extraAppArgs = []) => {
      record(`startOverlay:${extraAppArgs.join(' ')}`);
    },
    // Would indicate a secondary detached launch — must never fire here.
    launchAppCommandDetached: (_appPath: string, appArgs: string[]) => {
      record(`launch:${appArgs.join(' ')}`);
    },
    log: () => {},
    cleanupPlaybackSession: async () => {},
    getMpvProc: () => null,
  });

  const expected = [
    'startMpv',
    'startOverlay:--youtube-play https://www.youtube.com/watch?v=65Ovd7t8sNw --youtube-mode download',
  ];
  assert.deepEqual(invocations, expected);
});

View File

@@ -6,13 +6,13 @@ import { commandExists, isYoutubeTarget, realpathMaybe, resolvePathMaybe } from
import { collectVideos, showFzfMenu, showRofiMenu } from '../picker.js'; import { collectVideos, showFzfMenu, showRofiMenu } from '../picker.js';
import { import {
cleanupPlaybackSession, cleanupPlaybackSession,
launchAppCommandDetached,
startMpv, startMpv,
startOverlay, startOverlay,
state, state,
stopOverlay, stopOverlay,
waitForUnixSocketReady, waitForUnixSocketReady,
} from '../mpv.js'; } from '../mpv.js';
import { generateYoutubeSubtitles } from '../youtube.js';
import type { Args } from '../types.js'; import type { Args } from '../types.js';
import type { LauncherCommandContext } from './context.js'; import type { LauncherCommandContext } from './context.js';
import { ensureLauncherSetupReady } from '../setup-gate.js'; import { ensureLauncherSetupReady } from '../setup-gate.js';
@@ -126,30 +126,66 @@ async function ensurePlaybackSetupReady(context: LauncherCommandContext): Promis
} }
export async function runPlaybackCommand(context: LauncherCommandContext): Promise<void> { export async function runPlaybackCommand(context: LauncherCommandContext): Promise<void> {
return runPlaybackCommandWithDeps(context, {
ensurePlaybackSetupReady,
chooseTarget,
checkDependencies,
registerCleanup,
startMpv,
waitForUnixSocketReady,
startOverlay,
launchAppCommandDetached,
log,
cleanupPlaybackSession,
getMpvProc: () => state.mpvProc,
});
}
type PlaybackCommandDeps = {
ensurePlaybackSetupReady: (context: LauncherCommandContext) => Promise<void>;
chooseTarget: (
args: Args,
scriptPath: string,
) => Promise<{ target: string; kind: 'file' | 'url' } | null>;
checkDependencies: (args: Args) => void;
registerCleanup: (context: LauncherCommandContext) => void;
startMpv: typeof startMpv;
waitForUnixSocketReady: typeof waitForUnixSocketReady;
startOverlay: typeof startOverlay;
launchAppCommandDetached: typeof launchAppCommandDetached;
log: typeof log;
cleanupPlaybackSession: typeof cleanupPlaybackSession;
getMpvProc: () => typeof state.mpvProc;
};
export async function runPlaybackCommandWithDeps(
context: LauncherCommandContext,
deps: PlaybackCommandDeps,
): Promise<void> {
const { args, appPath, scriptPath, mpvSocketPath, pluginRuntimeConfig, processAdapter } = context; const { args, appPath, scriptPath, mpvSocketPath, pluginRuntimeConfig, processAdapter } = context;
if (!appPath) { if (!appPath) {
fail('SubMiner AppImage not found. Install to ~/.local/bin/ or set SUBMINER_APPIMAGE_PATH.'); fail('SubMiner AppImage not found. Install to ~/.local/bin/ or set SUBMINER_APPIMAGE_PATH.');
} }
await ensurePlaybackSetupReady(context); await deps.ensurePlaybackSetupReady(context);
if (!args.target) { if (!args.target) {
checkPickerDependencies(args); checkPickerDependencies(args);
} }
const targetChoice = await chooseTarget(args, scriptPath); const targetChoice = await deps.chooseTarget(args, scriptPath);
if (!targetChoice) { if (!targetChoice) {
log('info', args.logLevel, 'No video selected, exiting'); deps.log('info', args.logLevel, 'No video selected, exiting');
processAdapter.exit(0); processAdapter.exit(0);
} }
checkDependencies({ deps.checkDependencies({
...args, ...args,
target: targetChoice ? targetChoice.target : args.target, target: targetChoice ? targetChoice.target : args.target,
targetKind: targetChoice ? targetChoice.kind : 'url', targetKind: targetChoice ? targetChoice.kind : 'url',
}); });
registerCleanup(context); deps.registerCleanup(context);
const selectedTarget = targetChoice const selectedTarget = targetChoice
? { ? {
@@ -159,30 +195,11 @@ export async function runPlaybackCommand(context: LauncherCommandContext): Promi
: { target: args.target, kind: 'url' as const }; : { target: args.target, kind: 'url' as const };
const isYoutubeUrl = selectedTarget.kind === 'url' && isYoutubeTarget(selectedTarget.target); const isYoutubeUrl = selectedTarget.kind === 'url' && isYoutubeTarget(selectedTarget.target);
let preloadedSubtitles: { primaryPath?: string; secondaryPath?: string } | undefined; const isAppOwnedYoutubeFlow = isYoutubeUrl;
const youtubeMode = args.youtubeMode ?? 'download';
if (isYoutubeUrl) { if (isYoutubeUrl) {
log('info', args.logLevel, 'YouTube subtitle generation: preload before mpv'); deps.log('info', args.logLevel, 'YouTube subtitle flow: app-owned picker after mpv bootstrap');
const generated = await generateYoutubeSubtitles(selectedTarget.target, args);
preloadedSubtitles = {
primaryPath: generated.primaryPath,
secondaryPath: generated.secondaryPath,
};
const primaryStatus = generated.primaryPath
? 'ready'
: generated.primaryNative
? 'native'
: 'missing';
const secondaryStatus = generated.secondaryPath
? 'ready'
: generated.secondaryNative
? 'native'
: 'missing';
log(
'info',
args.logLevel,
`YouTube subtitle result: primary=${primaryStatus}, secondary=${secondaryStatus}`,
);
} }
const shouldPauseUntilOverlayReady = const shouldPauseUntilOverlayReady =
@@ -191,47 +208,57 @@ export async function runPlaybackCommand(context: LauncherCommandContext): Promi
pluginRuntimeConfig.autoStartPauseUntilReady; pluginRuntimeConfig.autoStartPauseUntilReady;
if (shouldPauseUntilOverlayReady) { if (shouldPauseUntilOverlayReady) {
log('info', args.logLevel, 'Configured to pause mpv until overlay and tokenization are ready'); deps.log('info', args.logLevel, 'Configured to pause mpv until overlay and tokenization are ready');
} }
await startMpv( await deps.startMpv(
selectedTarget.target, selectedTarget.target,
selectedTarget.kind, selectedTarget.kind,
args, args,
mpvSocketPath, mpvSocketPath,
appPath, appPath,
preloadedSubtitles, undefined,
{ startPaused: shouldPauseUntilOverlayReady }, {
startPaused: shouldPauseUntilOverlayReady || isAppOwnedYoutubeFlow,
disableYoutubeSubtitleAutoLoad: isAppOwnedYoutubeFlow,
},
); );
const ready = await waitForUnixSocketReady(mpvSocketPath, 10000); const ready = await deps.waitForUnixSocketReady(mpvSocketPath, 10000);
const pluginAutoStartEnabled = pluginRuntimeConfig.autoStart; const pluginAutoStartEnabled = pluginRuntimeConfig.autoStart;
const shouldStartOverlay = args.startOverlay || args.autoStartOverlay; const shouldStartOverlay = args.startOverlay || args.autoStartOverlay || isAppOwnedYoutubeFlow;
if (shouldStartOverlay) { if (shouldStartOverlay) {
if (ready) { if (ready) {
log('info', args.logLevel, 'MPV IPC socket ready, starting SubMiner overlay'); deps.log('info', args.logLevel, 'MPV IPC socket ready, starting SubMiner overlay');
} else { } else {
log( deps.log(
'info', 'info',
args.logLevel, args.logLevel,
'MPV IPC socket not ready after timeout, starting SubMiner overlay anyway', 'MPV IPC socket not ready after timeout, starting SubMiner overlay anyway',
); );
} }
await startOverlay(appPath, args, mpvSocketPath); await deps.startOverlay(
appPath,
args,
mpvSocketPath,
isAppOwnedYoutubeFlow
? ['--youtube-play', selectedTarget.target, '--youtube-mode', youtubeMode]
: [],
);
} else if (pluginAutoStartEnabled) { } else if (pluginAutoStartEnabled) {
if (ready) { if (ready) {
log('info', args.logLevel, 'MPV IPC socket ready, relying on mpv plugin auto-start'); deps.log('info', args.logLevel, 'MPV IPC socket ready, relying on mpv plugin auto-start');
} else { } else {
log('info', args.logLevel, 'MPV IPC socket not ready yet, relying on mpv plugin auto-start'); deps.log('info', args.logLevel, 'MPV IPC socket not ready yet, relying on mpv plugin auto-start');
} }
} else if (ready) { } else if (ready) {
log( deps.log(
'info', 'info',
args.logLevel, args.logLevel,
'MPV IPC socket ready, overlay auto-start disabled (use y-s to start)', 'MPV IPC socket ready, overlay auto-start disabled (use y-s to start)',
); );
} else { } else {
log( deps.log(
'info', 'info',
args.logLevel, args.logLevel,
'MPV IPC socket not ready yet, overlay auto-start disabled (use y-s to start)', 'MPV IPC socket not ready yet, overlay auto-start disabled (use y-s to start)',
@@ -239,7 +266,7 @@ export async function runPlaybackCommand(context: LauncherCommandContext): Promi
} }
await new Promise<void>((resolve) => { await new Promise<void>((resolve) => {
const mpvProc = state.mpvProc; const mpvProc = deps.getMpvProc();
if (!mpvProc) { if (!mpvProc) {
stopOverlay(args); stopOverlay(args);
resolve(); resolve();
@@ -247,7 +274,7 @@ export async function runPlaybackCommand(context: LauncherCommandContext): Promi
} }
const finalize = (code: number | null | undefined) => { const finalize = (code: number | null | undefined) => {
void cleanupPlaybackSession(args).finally(() => { void deps.cleanupPlaybackSession(args).finally(() => {
processAdapter.setExitCode(code ?? 0); processAdapter.setExitCode(code ?? 0);
resolve(); resolve();
}); });

View File

@@ -111,6 +111,7 @@ export function createDefaultArgs(launcherConfig: LauncherYoutubeSubgenConfig):
youtubeSubgenAudioFormat: process.env.SUBMINER_YT_SUBGEN_AUDIO_FORMAT || 'm4a', youtubeSubgenAudioFormat: process.env.SUBMINER_YT_SUBGEN_AUDIO_FORMAT || 'm4a',
youtubeSubgenKeepTemp: process.env.SUBMINER_YT_SUBGEN_KEEP_TEMP === '1', youtubeSubgenKeepTemp: process.env.SUBMINER_YT_SUBGEN_KEEP_TEMP === '1',
youtubeFixWithAi: launcherConfig.fixWithAi === true, youtubeFixWithAi: launcherConfig.fixWithAi === true,
youtubeMode: undefined,
jimakuApiKey: process.env.SUBMINER_JIMAKU_API_KEY || '', jimakuApiKey: process.env.SUBMINER_JIMAKU_API_KEY || '',
jimakuApiKeyCommand: process.env.SUBMINER_JIMAKU_API_KEY_COMMAND || '', jimakuApiKeyCommand: process.env.SUBMINER_JIMAKU_API_KEY_COMMAND || '',
jimakuApiBaseUrl: process.env.SUBMINER_JIMAKU_API_BASE_URL || DEFAULT_JIMAKU_API_BASE_URL, jimakuApiBaseUrl: process.env.SUBMINER_JIMAKU_API_BASE_URL || DEFAULT_JIMAKU_API_BASE_URL,
@@ -250,6 +251,9 @@ export function applyInvocationsToArgs(parsed: Args, invocations: CliInvocations
} }
if (invocations.ytInvocation) { if (invocations.ytInvocation) {
if (invocations.ytInvocation.mode) {
parsed.youtubeMode = invocations.ytInvocation.mode;
}
if (invocations.ytInvocation.logLevel) if (invocations.ytInvocation.logLevel)
parsed.logLevel = parseLogLevel(invocations.ytInvocation.logLevel); parsed.logLevel = parseLogLevel(invocations.ytInvocation.logLevel);
if (invocations.ytInvocation.outDir) if (invocations.ytInvocation.outDir)

View File

@@ -16,6 +16,7 @@ export interface JellyfinInvocation {
export interface YtInvocation { export interface YtInvocation {
target?: string; target?: string;
mode?: 'download' | 'generate';
outDir?: string; outDir?: string;
keepTemp?: boolean; keepTemp?: boolean;
whisperBin?: string; whisperBin?: string;
@@ -222,6 +223,7 @@ export function parseCliPrograms(
.alias('youtube') .alias('youtube')
.description('YouTube workflows') .description('YouTube workflows')
.argument('[target]', 'YouTube URL or ytsearch: query') .argument('[target]', 'YouTube URL or ytsearch: query')
.option('--mode <mode>', 'YouTube subtitle acquisition mode')
.option('-o, --out-dir <dir>', 'Subtitle output dir') .option('-o, --out-dir <dir>', 'Subtitle output dir')
.option('--keep-temp', 'Keep temp files') .option('--keep-temp', 'Keep temp files')
.option('--whisper-bin <path>', 'whisper.cpp CLI path') .option('--whisper-bin <path>', 'whisper.cpp CLI path')
@@ -233,6 +235,10 @@ export function parseCliPrograms(
.action((target: string | undefined, options: Record<string, unknown>) => { .action((target: string | undefined, options: Record<string, unknown>) => {
ytInvocation = { ytInvocation = {
target, target,
mode:
typeof options.mode === 'string' && (options.mode === 'download' || options.mode === 'generate')
? options.mode
: undefined,
outDir: typeof options.outDir === 'string' ? options.outDir : undefined, outDir: typeof options.outDir === 'string' ? options.outDir : undefined,
keepTemp: options.keepTemp === true, keepTemp: options.keepTemp === true,
whisperBin: typeof options.whisperBin === 'string' ? options.whisperBin : undefined, whisperBin: typeof options.whisperBin === 'string' ? options.whisperBin : undefined,

View File

@@ -1,9 +1,10 @@
import test from 'node:test'; import test from 'node:test';
import assert from 'node:assert/strict'; import assert from 'node:assert/strict';
import path from 'node:path'; import path from 'node:path';
import { getDefaultMpvLogFile } from './types.js'; import { getDefaultLauncherLogFile, getDefaultMpvLogFile } from './types.js';
test('getDefaultMpvLogFile uses APPDATA on windows', () => { test('getDefaultMpvLogFile uses APPDATA on windows', () => {
const today = new Date().toISOString().slice(0, 10);
const resolved = getDefaultMpvLogFile({ const resolved = getDefaultMpvLogFile({
platform: 'win32', platform: 'win32',
homeDir: 'C:\\Users\\tester', homeDir: 'C:\\Users\\tester',
@@ -17,8 +18,27 @@ test('getDefaultMpvLogFile uses APPDATA on windows', () => {
'C:\\Users\\tester\\AppData\\Roaming', 'C:\\Users\\tester\\AppData\\Roaming',
'SubMiner', 'SubMiner',
'logs', 'logs',
`SubMiner-${new Date().toISOString().slice(0, 10)}.log`, `mpv-${today}.log`,
), ),
), ),
); );
}); });
test('getDefaultLauncherLogFile uses launcher prefix', () => {
const today = new Date().toISOString().slice(0, 10);
const resolved = getDefaultLauncherLogFile({
platform: 'linux',
homeDir: '/home/tester',
});
assert.equal(
resolved,
path.join(
'/home/tester',
'.config',
'SubMiner',
'logs',
`launcher-${today}.log`,
),
);
});

View File

@@ -1,7 +1,6 @@
import fs from 'node:fs';
import path from 'node:path';
import type { LogLevel } from './types.js'; import type { LogLevel } from './types.js';
import { DEFAULT_MPV_LOG_FILE } from './types.js'; import { DEFAULT_MPV_LOG_FILE, getDefaultLauncherLogFile } from './types.js';
import { appendLogLine, resolveDefaultLogFilePath } from '../src/shared/log-files.js';
export const COLORS = { export const COLORS = {
red: '\x1b[0;31m', red: '\x1b[0;31m',
@@ -28,14 +27,32 @@ export function getMpvLogPath(): string {
return DEFAULT_MPV_LOG_FILE; return DEFAULT_MPV_LOG_FILE;
} }
export function appendToMpvLog(message: string): void { export function getLauncherLogPath(): string {
const logPath = getMpvLogPath(); const envPath = process.env.SUBMINER_LAUNCHER_LOG?.trim();
try { if (envPath) return envPath;
fs.mkdirSync(path.dirname(logPath), { recursive: true }); return getDefaultLauncherLogFile();
fs.appendFileSync(logPath, `[${new Date().toISOString()}] ${message}\n`, { encoding: 'utf8' });
} catch {
// ignore logging failures
} }
export function getAppLogPath(): string {
const envPath = process.env.SUBMINER_APP_LOG?.trim();
if (envPath) return envPath;
return resolveDefaultLogFilePath('app');
}
function appendTimestampedLog(logPath: string, message: string): void {
appendLogLine(logPath, `[${new Date().toISOString()}] ${message}`);
}
export function appendToMpvLog(message: string): void {
appendTimestampedLog(getMpvLogPath(), message);
}
export function appendToLauncherLog(message: string): void {
appendTimestampedLog(getLauncherLogPath(), message);
}
export function appendToAppLog(message: string): void {
appendTimestampedLog(getAppLogPath(), message);
} }
export function log(level: LogLevel, configured: LogLevel, message: string): void { export function log(level: LogLevel, configured: LogLevel, message: string): void {
@@ -49,11 +66,11 @@ export function log(level: LogLevel, configured: LogLevel, message: string): voi
? COLORS.red ? COLORS.red
: COLORS.cyan; : COLORS.cyan;
process.stdout.write(`${color}[${level.toUpperCase()}]${COLORS.reset} ${message}\n`); process.stdout.write(`${color}[${level.toUpperCase()}]${COLORS.reset} ${message}\n`);
appendToMpvLog(`[${level.toUpperCase()}] ${message}`); appendToLauncherLog(`[${level.toUpperCase()}] ${message}`);
} }
export function fail(message: string): never { export function fail(message: string): never {
process.stderr.write(`${COLORS.red}[ERROR]${COLORS.reset} ${message}\n`); process.stderr.write(`${COLORS.red}[ERROR]${COLORS.reset} ${message}\n`);
appendToMpvLog(`[ERROR] ${message}`); appendToLauncherLog(`[ERROR] ${message}`);
process.exit(1); process.exit(1);
} }

View File

@@ -205,136 +205,6 @@ test('doctor refresh-known-words forwards app refresh command without requiring
}); });
}); });
test('youtube command rejects removed --mode option', () => {
withTempDir((root) => {
const homeDir = path.join(root, 'home');
const xdgConfigHome = path.join(root, 'xdg');
const appPath = path.join(root, 'fake-subminer.sh');
fs.writeFileSync(appPath, '#!/bin/sh\nexit 0\n');
fs.chmodSync(appPath, 0o755);
const env = {
...makeTestEnv(homeDir, xdgConfigHome),
SUBMINER_APPIMAGE_PATH: appPath,
};
const result = runLauncher(
['youtube', 'https://www.youtube.com/watch?v=test123', '--mode', 'automatic'],
env,
);
assert.equal(result.status, 1);
assert.match(result.stderr, /unknown option '--mode'/i);
});
});
test('youtube playback generates subtitles before mpv launch', { timeout: 15000 }, () => {
withTempDir((root) => {
const homeDir = path.join(root, 'home');
const xdgConfigHome = path.join(root, 'xdg');
const binDir = path.join(root, 'bin');
const appPath = path.join(root, 'fake-subminer.sh');
const ytdlpLogPath = path.join(root, 'yt-dlp.log');
const mpvCapturePath = path.join(root, 'mpv-order.txt');
const mpvArgsPath = path.join(root, 'mpv-args.txt');
const socketPath = path.join(root, 'mpv.sock');
const bunBinary = JSON.stringify(process.execPath.replace(/\\/g, '/'));
fs.mkdirSync(binDir, { recursive: true });
fs.mkdirSync(path.join(xdgConfigHome, 'SubMiner'), { recursive: true });
fs.mkdirSync(path.join(xdgConfigHome, 'mpv', 'script-opts'), { recursive: true });
fs.writeFileSync(
path.join(xdgConfigHome, 'SubMiner', 'setup-state.json'),
JSON.stringify({
version: 1,
status: 'completed',
completedAt: '2026-03-08T00:00:00.000Z',
completionSource: 'user',
lastSeenYomitanDictionaryCount: 0,
pluginInstallStatus: 'installed',
pluginInstallPathSummary: null,
}),
);
fs.writeFileSync(
path.join(xdgConfigHome, 'mpv', 'script-opts', 'subminer.conf'),
`socket_path=${socketPath}\nauto_start=no\nauto_start_visible_overlay=no\nauto_start_pause_until_ready=no\n`,
);
fs.writeFileSync(appPath, '#!/bin/sh\nexit 0\n');
fs.chmodSync(appPath, 0o755);
fs.writeFileSync(
path.join(binDir, 'yt-dlp'),
`#!/bin/sh
set -eu
printf '%s\\n' "$*" >> "$SUBMINER_TEST_YTDLP_LOG"
if printf '%s\\n' "$*" | grep -q -- '--dump-single-json'; then
printf '{"id":"video123"}\\n'
exit 0
fi
out_dir=""
prev=""
for arg in "$@"; do
if [ "$prev" = "-o" ]; then
out_dir=$(dirname "$arg")
break
fi
prev="$arg"
done
mkdir -p "$out_dir"
printf '1\\n00:00:00,000 --> 00:00:01,000\\nこんにちは\\n' > "$out_dir/video123.ja.srt"
printf '1\\n00:00:00,000 --> 00:00:01,000\\nhello\\n' > "$out_dir/video123.en.srt"
`,
'utf8',
);
fs.chmodSync(path.join(binDir, 'yt-dlp'), 0o755);
fs.writeFileSync(path.join(binDir, 'ffmpeg'), '#!/bin/sh\nexit 0\n', 'utf8');
fs.chmodSync(path.join(binDir, 'ffmpeg'), 0o755);
fs.writeFileSync(
path.join(binDir, 'mpv'),
`#!/bin/sh
set -eu
if [ -s "$SUBMINER_TEST_YTDLP_LOG" ]; then
printf 'generated-before-mpv\\n' > "$SUBMINER_TEST_MPV_ORDER"
else
printf 'mpv-before-generation\\n' > "$SUBMINER_TEST_MPV_ORDER"
fi
printf '%s\\n' "$@" > "$SUBMINER_TEST_MPV_ARGS"
socket_path=""
for arg in "$@"; do
case "$arg" in
--input-ipc-server=*)
socket_path="\${arg#--input-ipc-server=}"
;;
esac
done
${bunBinary} -e "const net=require('node:net'); const fs=require('node:fs'); const path=require('node:path'); const socket=process.argv[1]||''; try{ if(socket) fs.mkdirSync(path.dirname(socket),{recursive:true}); }catch{} try{ if(socket) fs.rmSync(socket,{force:true}); }catch{} const server=net.createServer((c)=>c.end()); server.on('error',()=>process.exit(0)); if(!socket) process.exit(0); try{ server.listen(socket,()=>setTimeout(()=>server.close(()=>process.exit(0)),250)); } catch { process.exit(0); }" "$socket_path"
`,
'utf8',
);
fs.chmodSync(path.join(binDir, 'mpv'), 0o755);
const env = {
...makeTestEnv(homeDir, xdgConfigHome),
PATH: `${binDir}${path.delimiter}${process.env.Path || process.env.PATH || ''}`,
Path: `${binDir}${path.delimiter}${process.env.Path || process.env.PATH || ''}`,
SUBMINER_APPIMAGE_PATH: appPath,
SUBMINER_TEST_YTDLP_LOG: ytdlpLogPath,
SUBMINER_TEST_MPV_ORDER: mpvCapturePath,
SUBMINER_TEST_MPV_ARGS: mpvArgsPath,
};
const result = runLauncher(['youtube', 'https://www.youtube.com/watch?v=test123'], env);
assert.equal(result.status, 0, `stdout:\n${result.stdout}\nstderr:\n${result.stderr}`);
assert.equal(fs.readFileSync(mpvCapturePath, 'utf8').trim(), 'generated-before-mpv');
assert.match(
fs.readFileSync(mpvArgsPath, 'utf8'),
/https:\/\/www\.youtube\.com\/watch\?v=test123/,
);
assert.match(fs.readFileSync(ytdlpLogPath, 'utf8'), /--dump-single-json/);
});
});
test('launcher forwards --args to mpv as parsed tokens', { timeout: 15000 }, () => { test('launcher forwards --args to mpv as parsed tokens', { timeout: 15000 }, () => {
withTempDir((root) => { withTempDir((root) => {
const homeDir = path.join(root, 'home'); const homeDir = path.join(root, 'home');
@@ -387,6 +257,10 @@ ${bunBinary} -e "const net=require('node:net'); const fs=require('node:fs'); con
'utf8', 'utf8',
); );
fs.chmodSync(path.join(binDir, 'mpv'), 0o755); fs.chmodSync(path.join(binDir, 'mpv'), 0o755);
fs.writeFileSync(path.join(binDir, 'yt-dlp'), '#!/bin/sh\nexit 0\n', 'utf8');
fs.writeFileSync(path.join(binDir, 'ffmpeg'), '#!/bin/sh\nexit 0\n', 'utf8');
fs.chmodSync(path.join(binDir, 'yt-dlp'), 0o755);
fs.chmodSync(path.join(binDir, 'ffmpeg'), 0o755);
const env = { const env = {
...makeTestEnv(homeDir, xdgConfigHome), ...makeTestEnv(homeDir, xdgConfigHome),
@@ -466,6 +340,10 @@ ${bunBinary} -e "const net=require('node:net'); const fs=require('node:fs'); con
'utf8', 'utf8',
); );
fs.chmodSync(path.join(binDir, 'mpv'), 0o755); fs.chmodSync(path.join(binDir, 'mpv'), 0o755);
fs.writeFileSync(path.join(binDir, 'yt-dlp'), '#!/bin/sh\nexit 0\n', 'utf8');
fs.writeFileSync(path.join(binDir, 'ffmpeg'), '#!/bin/sh\nexit 0\n', 'utf8');
fs.chmodSync(path.join(binDir, 'yt-dlp'), 0o755);
fs.chmodSync(path.join(binDir, 'ffmpeg'), 0o755);
const env = { const env = {
...makeTestEnv(homeDir, xdgConfigHome), ...makeTestEnv(homeDir, xdgConfigHome),
@@ -484,6 +362,84 @@ ${bunBinary} -e "const net=require('node:net'); const fs=require('node:fs'); con
}); });
}); });
test('launcher disables plugin startup pause gate for app-owned youtube flow', { timeout: 15000 }, () => {
withTempDir((root) => {
const homeDir = path.join(root, 'home');
const xdgConfigHome = path.join(root, 'xdg');
const binDir = path.join(root, 'bin');
const appPath = path.join(root, 'fake-subminer.sh');
const mpvArgsPath = path.join(root, 'mpv-args.txt');
const socketPath = path.join(root, 'mpv.sock');
const bunBinary = JSON.stringify(process.execPath.replace(/\\/g, '/'));
fs.mkdirSync(binDir, { recursive: true });
fs.mkdirSync(path.join(xdgConfigHome, 'SubMiner'), { recursive: true });
fs.mkdirSync(path.join(xdgConfigHome, 'mpv', 'script-opts'), { recursive: true });
fs.writeFileSync(
path.join(xdgConfigHome, 'SubMiner', 'setup-state.json'),
JSON.stringify({
version: 1,
status: 'completed',
completedAt: '2026-03-08T00:00:00.000Z',
completionSource: 'user',
lastSeenYomitanDictionaryCount: 0,
pluginInstallStatus: 'installed',
pluginInstallPathSummary: null,
}),
);
fs.writeFileSync(
path.join(xdgConfigHome, 'mpv', 'script-opts', 'subminer.conf'),
`socket_path=${socketPath}\nauto_start=yes\nauto_start_visible_overlay=yes\nauto_start_pause_until_ready=yes\n`,
);
fs.writeFileSync(
appPath,
'#!/bin/sh\nif [ -n "$SUBMINER_TEST_CAPTURE" ]; then printf "%s\\n" "$@" > "$SUBMINER_TEST_CAPTURE"; fi\nexit 0\n',
);
fs.chmodSync(appPath, 0o755);
fs.writeFileSync(
path.join(binDir, 'mpv'),
`#!/bin/sh
set -eu
printf '%s\\n' "$@" > "$SUBMINER_TEST_MPV_ARGS"
socket_path=""
for arg in "$@"; do
case "$arg" in
--input-ipc-server=*)
socket_path="\${arg#--input-ipc-server=}"
;;
esac
done
${bunBinary} -e "const net=require('node:net'); const fs=require('node:fs'); const path=require('node:path'); const socket=process.argv[1]||''; try{ if (socket) fs.mkdirSync(path.dirname(socket),{recursive:true}); }catch{} try{ if (socket) fs.rmSync(socket,{force:true}); }catch{} if(!socket) process.exit(0); const server=net.createServer((c)=>c.end()); server.on('error',()=>process.exit(0)); try{ server.listen(socket,()=>setTimeout(()=>server.close(()=>process.exit(0)),250)); } catch { process.exit(0); }" "$socket_path"
`,
'utf8',
);
fs.chmodSync(path.join(binDir, 'mpv'), 0o755);
fs.writeFileSync(path.join(binDir, 'yt-dlp'), '#!/bin/sh\nexit 0\n', 'utf8');
fs.writeFileSync(path.join(binDir, 'ffmpeg'), '#!/bin/sh\nexit 0\n', 'utf8');
fs.chmodSync(path.join(binDir, 'yt-dlp'), 0o755);
fs.chmodSync(path.join(binDir, 'ffmpeg'), 0o755);
const env = {
...makeTestEnv(homeDir, xdgConfigHome),
PATH: `${binDir}${path.delimiter}${process.env.Path || process.env.PATH || ''}`,
Path: `${binDir}${path.delimiter}${process.env.Path || process.env.PATH || ''}`,
DISPLAY: ':99',
XDG_SESSION_TYPE: 'x11',
SUBMINER_APPIMAGE_PATH: appPath,
SUBMINER_TEST_MPV_ARGS: mpvArgsPath,
SUBMINER_TEST_CAPTURE: path.join(root, 'captured-args.txt'),
};
const result = runLauncher(['yt', 'https://www.youtube.com/watch?v=abc123'], env);
assert.equal(result.status, 0, `stdout:\n${result.stdout}\nstderr:\n${result.stderr}`);
assert.match(
fs.readFileSync(mpvArgsPath, 'utf8'),
/--script-opts=.*subminer-auto_start_pause_until_ready=no/,
);
});
});
test('dictionary command forwards --dictionary and --dictionary-target to app command path', () => { test('dictionary command forwards --dictionary and --dictionary-target to app command path', () => {
withTempDir((root) => { withTempDir((root) => {
const homeDir = path.join(root, 'home'); const homeDir = path.join(root, 'home');

View File

@@ -302,7 +302,47 @@ test('startOverlay resolves without fixed 2s sleep when readiness signals arrive
} }
}); });
test('cleanupPlaybackSession preserves background app while stopping mpv-owned children', async () => { test('startOverlay captures app stdout and stderr into app log', async () => {
const { dir, socketPath } = createTempSocketPath();
const appPath = path.join(dir, 'fake-subminer.sh');
const appLogPath = path.join(dir, 'app.log');
const originalAppLog = process.env.SUBMINER_APP_LOG;
fs.writeFileSync(
appPath,
'#!/bin/sh\nprintf "hello from stdout\\n"\nprintf "hello from stderr\\n" >&2\nexit 0\n',
);
fs.chmodSync(appPath, 0o755);
fs.writeFileSync(socketPath, '');
const originalCreateConnection = net.createConnection;
try {
process.env.SUBMINER_APP_LOG = appLogPath;
net.createConnection = (() => {
const socket = new EventEmitter() as net.Socket;
socket.destroy = (() => socket) as net.Socket['destroy'];
socket.setTimeout = (() => socket) as net.Socket['setTimeout'];
setTimeout(() => socket.emit('connect'), 10);
return socket;
}) as typeof net.createConnection;
await startOverlay(appPath, makeArgs(), socketPath);
const logText = fs.readFileSync(appLogPath, 'utf8');
assert.match(logText, /\[STDOUT\] hello from stdout/);
assert.match(logText, /\[STDERR\] hello from stderr/);
} finally {
net.createConnection = originalCreateConnection;
state.overlayProc = null;
state.overlayManagedByLauncher = false;
if (originalAppLog === undefined) {
delete process.env.SUBMINER_APP_LOG;
} else {
process.env.SUBMINER_APP_LOG = originalAppLog;
}
fs.rmSync(dir, { recursive: true, force: true });
}
});
test('cleanupPlaybackSession stops launcher-managed overlay app and mpv-owned children', async () => {
const { dir } = createTempSocketPath(); const { dir } = createTempSocketPath();
const appPath = path.join(dir, 'fake-subminer.sh'); const appPath = path.join(dir, 'fake-subminer.sh');
const appInvocationsPath = path.join(dir, 'app-invocations.log'); const appInvocationsPath = path.join(dir, 'app-invocations.log');
@@ -345,8 +385,8 @@ test('cleanupPlaybackSession preserves background app while stopping mpv-owned c
try { try {
await cleanupPlaybackSession(makeArgs()); await cleanupPlaybackSession(makeArgs());
assert.deepEqual(calls, ['mpv-kill', 'helper-kill']); assert.deepEqual(calls, ['overlay-kill', 'mpv-kill', 'helper-kill']);
assert.equal(fs.existsSync(appInvocationsPath), false); assert.match(fs.readFileSync(appInvocationsPath, 'utf8'), /--stop/);
} finally { } finally {
state.overlayProc = null; state.overlayProc = null;
state.mpvProc = null; state.mpvProc = null;

View File

@@ -5,7 +5,7 @@ import net from 'node:net';
import { spawn, spawnSync } from 'node:child_process'; import { spawn, spawnSync } from 'node:child_process';
import type { LogLevel, Backend, Args, MpvTrack } from './types.js'; import type { LogLevel, Backend, Args, MpvTrack } from './types.js';
import { DEFAULT_MPV_SUBMINER_ARGS, DEFAULT_YOUTUBE_YTDL_FORMAT } from './types.js'; import { DEFAULT_MPV_SUBMINER_ARGS, DEFAULT_YOUTUBE_YTDL_FORMAT } from './types.js';
import { log, fail, getMpvLogPath } from './log.js'; import { appendToAppLog, getAppLogPath, log, fail, getMpvLogPath } from './log.js';
import { buildSubminerScriptOpts, resolveAniSkipMetadataForFile } from './aniskip-metadata.js'; import { buildSubminerScriptOpts, resolveAniSkipMetadataForFile } from './aniskip-metadata.js';
import { import {
commandExists, commandExists,
@@ -542,7 +542,7 @@ export async function startMpv(
socketPath: string, socketPath: string,
appPath: string, appPath: string,
preloadedSubtitles?: { primaryPath?: string; secondaryPath?: string }, preloadedSubtitles?: { primaryPath?: string; secondaryPath?: string },
options?: { startPaused?: boolean }, options?: { startPaused?: boolean; disableYoutubeSubtitleAutoLoad?: boolean },
): Promise<void> { ): Promise<void> {
if (targetKind === 'file' && (!fs.existsSync(target) || !fs.statSync(target).isFile())) { if (targetKind === 'file' && (!fs.existsSync(target) || !fs.statSync(target).isFile())) {
fail(`Video file not found: ${target}`); fail(`Video file not found: ${target}`);
@@ -575,6 +575,7 @@ export async function startMpv(
log('debug', args.logLevel, `YouTube subtitle langs: ${subtitleLangs}`); log('debug', args.logLevel, `YouTube subtitle langs: ${subtitleLangs}`);
log('debug', args.logLevel, `YouTube audio langs: ${audioLangs}`); log('debug', args.logLevel, `YouTube audio langs: ${audioLangs}`);
mpvArgs.push(`--ytdl-format=${DEFAULT_YOUTUBE_YTDL_FORMAT}`, `--alang=${audioLangs}`); mpvArgs.push(`--ytdl-format=${DEFAULT_YOUTUBE_YTDL_FORMAT}`, `--alang=${audioLangs}`);
if (options?.disableYoutubeSubtitleAutoLoad !== true) {
mpvArgs.push( mpvArgs.push(
'--sub-auto=fuzzy', '--sub-auto=fuzzy',
`--slang=${subtitleLangs}`, `--slang=${subtitleLangs}`,
@@ -582,6 +583,9 @@ export async function startMpv(
'--ytdl-raw-options-append=sub-format=vtt/best', '--ytdl-raw-options-append=sub-format=vtt/best',
`--ytdl-raw-options-append=sub-langs=${subtitleLangs}`, `--ytdl-raw-options-append=sub-langs=${subtitleLangs}`,
); );
} else {
mpvArgs.push('--sub-auto=no');
}
} }
} }
@@ -597,7 +601,17 @@ export async function startMpv(
const aniSkipMetadata = shouldResolveAniSkipMetadata(target, targetKind, preloadedSubtitles) const aniSkipMetadata = shouldResolveAniSkipMetadata(target, targetKind, preloadedSubtitles)
? await resolveAniSkipMetadataForFile(target) ? await resolveAniSkipMetadataForFile(target)
: null; : null;
const scriptOpts = buildSubminerScriptOpts(appPath, socketPath, aniSkipMetadata, args.logLevel); const extraScriptOpts =
targetKind === 'url' && isYoutubeTarget(target) && options?.disableYoutubeSubtitleAutoLoad === true
? ['subminer-auto_start_pause_until_ready=no']
: [];
const scriptOpts = buildSubminerScriptOpts(
appPath,
socketPath,
aniSkipMetadata,
args.logLevel,
extraScriptOpts,
);
if (aniSkipMetadata) { if (aniSkipMetadata) {
log( log(
'debug', 'debug',
@@ -661,19 +675,25 @@ async function waitForOverlayStartCommandSettled(
}); });
} }
export async function startOverlay(appPath: string, args: Args, socketPath: string): Promise<void> { export async function startOverlay(
appPath: string,
args: Args,
socketPath: string,
extraAppArgs: string[] = [],
): Promise<void> {
const backend = detectBackend(args.backend); const backend = detectBackend(args.backend);
log('info', args.logLevel, `Starting SubMiner overlay (backend: ${backend})...`); log('info', args.logLevel, `Starting SubMiner overlay (backend: ${backend})...`);
const overlayArgs = ['--start', '--backend', backend, '--socket', socketPath]; const overlayArgs = ['--start', '--backend', backend, '--socket', socketPath, ...extraAppArgs];
if (args.logLevel !== 'info') overlayArgs.push('--log-level', args.logLevel); if (args.logLevel !== 'info') overlayArgs.push('--log-level', args.logLevel);
if (args.useTexthooker) overlayArgs.push('--texthooker'); if (args.useTexthooker) overlayArgs.push('--texthooker');
const target = resolveAppSpawnTarget(appPath, overlayArgs); const target = resolveAppSpawnTarget(appPath, overlayArgs);
state.overlayProc = spawn(target.command, target.args, { state.overlayProc = spawn(target.command, target.args, {
stdio: 'inherit', stdio: ['ignore', 'pipe', 'pipe'],
env: buildAppEnv(), env: buildAppEnv(),
}); });
attachAppProcessLogging(state.overlayProc);
state.overlayManagedByLauncher = true; state.overlayManagedByLauncher = true;
const [socketReady] = await Promise.all([ const [socketReady] = await Promise.all([
@@ -699,10 +719,7 @@ export function launchTexthookerOnly(appPath: string, args: Args): never {
if (args.logLevel !== 'info') overlayArgs.push('--log-level', args.logLevel); if (args.logLevel !== 'info') overlayArgs.push('--log-level', args.logLevel);
log('info', args.logLevel, 'Launching texthooker mode...'); log('info', args.logLevel, 'Launching texthooker mode...');
const result = spawnSync(appPath, overlayArgs, { const result = runSyncAppCommand(appPath, overlayArgs, true);
stdio: 'inherit',
env: buildAppEnv(),
});
if (result.error) { if (result.error) {
fail(`Failed to launch texthooker mode: ${result.error.message}`); fail(`Failed to launch texthooker mode: ${result.error.message}`);
} }
@@ -713,30 +730,7 @@ export function stopOverlay(args: Args): void {
if (state.stopRequested) return; if (state.stopRequested) return;
state.stopRequested = true; state.stopRequested = true;
if (state.overlayManagedByLauncher && state.appPath) { stopManagedOverlayApp(args);
log('info', args.logLevel, 'Stopping SubMiner overlay...');
const stopArgs = ['--stop'];
if (args.logLevel !== 'info') stopArgs.push('--log-level', args.logLevel);
const result = spawnSync(state.appPath, stopArgs, {
stdio: 'ignore',
env: buildAppEnv(),
});
if (result.error) {
log('warn', args.logLevel, `Failed to stop SubMiner overlay: ${result.error.message}`);
} else if (typeof result.status === 'number' && result.status !== 0) {
log('warn', args.logLevel, `SubMiner overlay stop command exited with status ${result.status}`);
}
if (state.overlayProc && !state.overlayProc.killed) {
try {
state.overlayProc.kill('SIGTERM');
} catch {
// ignore
}
}
}
if (state.mpvProc && !state.mpvProc.killed) { if (state.mpvProc && !state.mpvProc.killed) {
try { try {
@@ -761,6 +755,8 @@ export function stopOverlay(args: Args): void {
} }
export async function cleanupPlaybackSession(args: Args): Promise<void> { export async function cleanupPlaybackSession(args: Args): Promise<void> {
stopManagedOverlayApp(args);
if (state.mpvProc && !state.mpvProc.killed) { if (state.mpvProc && !state.mpvProc.killed) {
try { try {
state.mpvProc.kill('SIGTERM'); state.mpvProc.kill('SIGTERM');
@@ -783,9 +779,40 @@ export async function cleanupPlaybackSession(args: Args): Promise<void> {
await terminateTrackedDetachedMpv(args.logLevel); await terminateTrackedDetachedMpv(args.logLevel);
} }
function stopManagedOverlayApp(args: Args): void {
if (!(state.overlayManagedByLauncher && state.appPath)) {
return;
}
log('info', args.logLevel, 'Stopping SubMiner overlay...');
const stopArgs = ['--stop'];
if (args.logLevel !== 'info') stopArgs.push('--log-level', args.logLevel);
const target = resolveAppSpawnTarget(state.appPath, stopArgs);
const result = spawnSync(target.command, target.args, {
stdio: 'ignore',
env: buildAppEnv(),
});
if (result.error) {
log('warn', args.logLevel, `Failed to stop SubMiner overlay: ${result.error.message}`);
} else if (typeof result.status === 'number' && result.status !== 0) {
log('warn', args.logLevel, `SubMiner overlay stop command exited with status ${result.status}`);
}
if (state.overlayProc && !state.overlayProc.killed) {
try {
state.overlayProc.kill('SIGTERM');
} catch {
// ignore
}
}
}
function buildAppEnv(): NodeJS.ProcessEnv { function buildAppEnv(): NodeJS.ProcessEnv {
const env: Record<string, string | undefined> = { const env: Record<string, string | undefined> = {
...process.env, ...process.env,
SUBMINER_APP_LOG: getAppLogPath(),
SUBMINER_MPV_LOG: getMpvLogPath(), SUBMINER_MPV_LOG: getMpvLogPath(),
}; };
delete env.ELECTRON_RUN_AS_NODE; delete env.ELECTRON_RUN_AS_NODE;
@@ -804,6 +831,64 @@ function buildAppEnv(): NodeJS.ProcessEnv {
return env; return env;
} }
function appendCapturedAppOutput(kind: 'STDOUT' | 'STDERR', chunk: string): void {
const normalized = chunk.replace(/\r\n/g, '\n');
for (const line of normalized.split('\n')) {
if (!line) continue;
appendToAppLog(`[${kind}] ${line}`);
}
}
function attachAppProcessLogging(
proc: ReturnType<typeof spawn>,
options?: {
mirrorStdout?: boolean;
mirrorStderr?: boolean;
},
): void {
proc.stdout?.setEncoding('utf8');
proc.stderr?.setEncoding('utf8');
proc.stdout?.on('data', (chunk: string) => {
appendCapturedAppOutput('STDOUT', chunk);
if (options?.mirrorStdout) process.stdout.write(chunk);
});
proc.stderr?.on('data', (chunk: string) => {
appendCapturedAppOutput('STDERR', chunk);
if (options?.mirrorStderr) process.stderr.write(chunk);
});
}
function runSyncAppCommand(
appPath: string,
appArgs: string[],
mirrorOutput: boolean,
): {
status: number;
stdout: string;
stderr: string;
error?: Error;
} {
const target = resolveAppSpawnTarget(appPath, appArgs);
const result = spawnSync(target.command, target.args, {
env: buildAppEnv(),
encoding: 'utf8',
});
if (result.stdout) {
appendCapturedAppOutput('STDOUT', result.stdout);
if (mirrorOutput) process.stdout.write(result.stdout);
}
if (result.stderr) {
appendCapturedAppOutput('STDERR', result.stderr);
if (mirrorOutput) process.stderr.write(result.stderr);
}
return {
status: result.status ?? 1,
stdout: result.stdout ?? '',
stderr: result.stderr ?? '',
error: result.error ?? undefined,
};
}
function maybeCaptureAppArgs(appArgs: string[]): boolean { function maybeCaptureAppArgs(appArgs: string[]): boolean {
const capturePath = process.env.SUBMINER_TEST_CAPTURE?.trim(); const capturePath = process.env.SUBMINER_TEST_CAPTURE?.trim();
if (!capturePath) { if (!capturePath) {
@@ -821,20 +906,23 @@ function resolveAppSpawnTarget(appPath: string, appArgs: string[]): SpawnTarget
return resolveCommandInvocation(appPath, appArgs); return resolveCommandInvocation(appPath, appArgs);
} }
export function runAppCommandWithInherit(appPath: string, appArgs: string[]): never { export function runAppCommandWithInherit(appPath: string, appArgs: string[]): void {
if (maybeCaptureAppArgs(appArgs)) { if (maybeCaptureAppArgs(appArgs)) {
process.exit(0); process.exit(0);
} }
const target = resolveAppSpawnTarget(appPath, appArgs); const target = resolveAppSpawnTarget(appPath, appArgs);
const result = spawnSync(target.command, target.args, { const proc = spawn(target.command, target.args, {
stdio: 'inherit', stdio: ['ignore', 'pipe', 'pipe'],
env: buildAppEnv(), env: buildAppEnv(),
}); });
if (result.error) { attachAppProcessLogging(proc, { mirrorStdout: true, mirrorStderr: true });
fail(`Failed to run app command: ${result.error.message}`); proc.once('error', (error) => {
} fail(`Failed to run app command: ${error.message}`);
process.exit(result.status ?? 0); });
proc.once('close', (code) => {
process.exit(code ?? 0);
});
} }
export function runAppCommandCaptureOutput( export function runAppCommandCaptureOutput(
@@ -854,18 +942,7 @@ export function runAppCommandCaptureOutput(
}; };
} }
const target = resolveAppSpawnTarget(appPath, appArgs); return runSyncAppCommand(appPath, appArgs, false);
const result = spawnSync(target.command, target.args, {
env: buildAppEnv(),
encoding: 'utf8',
});
return {
status: result.status ?? 1,
stdout: result.stdout ?? '',
stderr: result.stderr ?? '',
error: result.error ?? undefined,
};
} }
export function runAppCommandAttached( export function runAppCommandAttached(
@@ -887,13 +964,14 @@ export function runAppCommandAttached(
return new Promise((resolve, reject) => { return new Promise((resolve, reject) => {
const proc = spawn(target.command, target.args, { const proc = spawn(target.command, target.args, {
stdio: 'inherit', stdio: ['ignore', 'pipe', 'pipe'],
env: buildAppEnv(), env: buildAppEnv(),
}); });
attachAppProcessLogging(proc, { mirrorStdout: true, mirrorStderr: true });
proc.once('error', (error) => { proc.once('error', (error) => {
reject(error); reject(error);
}); });
proc.once('exit', (code, signal) => { proc.once('close', (code, signal) => {
if (code !== null) { if (code !== null) {
resolve(code); resolve(code);
} else if (signal) { } else if (signal) {
@@ -921,10 +999,7 @@ export function runAppCommandWithInheritLogged(
logLevel, logLevel,
`${label}: launching app with args: ${[target.command, ...target.args].join(' ')}`, `${label}: launching app with args: ${[target.command, ...target.args].join(' ')}`,
); );
const result = spawnSync(target.command, target.args, { const result = runSyncAppCommand(appPath, appArgs, true);
stdio: 'inherit',
env: buildAppEnv(),
});
if (result.error) { if (result.error) {
fail(`Failed to run app command: ${result.error.message}`); fail(`Failed to run app command: ${result.error.message}`);
} }
@@ -953,8 +1028,13 @@ export function launchAppCommandDetached(
logLevel, logLevel,
`${label}: launching detached app with args: ${[target.command, ...target.args].join(' ')}`, `${label}: launching detached app with args: ${[target.command, ...target.args].join(' ')}`,
); );
const appLogPath = getAppLogPath();
fs.mkdirSync(path.dirname(appLogPath), { recursive: true });
const stdoutFd = fs.openSync(appLogPath, 'a');
const stderrFd = fs.openSync(appLogPath, 'a');
try {
const proc = spawn(target.command, target.args, { const proc = spawn(target.command, target.args, {
stdio: 'ignore', stdio: ['ignore', stdoutFd, stderrFd],
detached: true, detached: true,
env: buildAppEnv(), env: buildAppEnv(),
}); });
@@ -962,6 +1042,10 @@ export function launchAppCommandDetached(
log('warn', logLevel, `${label}: failed to launch detached app: ${error.message}`); log('warn', logLevel, `${label}: failed to launch detached app: ${error.message}`);
}); });
proc.unref(); proc.unref();
} finally {
fs.closeSync(stdoutFd);
fs.closeSync(stderrFd);
}
} }
export function launchMpvIdleDetached( export function launchMpvIdleDetached(

View File

@@ -85,6 +85,13 @@ test('parseArgs maps mpv idle action', () => {
assert.equal(parsed.mpvStatus, false); assert.equal(parsed.mpvStatus, false);
}); });
test('parseArgs captures youtube mode forwarding', () => {
const parsed = parseArgs(['youtube', 'https://example.com', '--mode', 'generate'], 'subminer', {});
assert.equal(parsed.target, 'https://example.com');
assert.equal(parsed.youtubeMode, 'generate');
});
test('parseArgs maps dictionary command and log-level override', () => { test('parseArgs maps dictionary command and log-level override', () => {
const parsed = parseArgs(['dictionary', '.', '--log-level', 'debug'], 'subminer', {}); const parsed = parseArgs(['dictionary', '.', '--log-level', 'debug'], 'subminer', {});

View File

@@ -310,6 +310,7 @@ test(
const appStartPath = path.join(smokeCase.artifactsDir, 'fake-app-start.log'); const appStartPath = path.join(smokeCase.artifactsDir, 'fake-app-start.log');
const appStopPath = path.join(smokeCase.artifactsDir, 'fake-app-stop.log'); const appStopPath = path.join(smokeCase.artifactsDir, 'fake-app-stop.log');
await waitForJsonLines(appStartPath, 1); await waitForJsonLines(appStartPath, 1);
await waitForJsonLines(appStopPath, 1);
const appStartEntries = readJsonLines(appStartPath); const appStartEntries = readJsonLines(appStartPath);
const appStopEntries = readJsonLines(appStopPath); const appStopEntries = readJsonLines(appStopPath);
@@ -324,7 +325,7 @@ test(
assert.match(result.stdout, /Starting SubMiner overlay/i); assert.match(result.stdout, /Starting SubMiner overlay/i);
assert.equal(appStartEntries.length, 1); assert.equal(appStartEntries.length, 1);
assert.equal(appStopEntries.length, 0); assert.equal(appStopEntries.length, 1);
assert.equal(mpvEntries.length >= 1, true); assert.equal(mpvEntries.length >= 1, true);
const appStartArgs = appStartEntries[0]?.argv; const appStartArgs = appStartEntries[0]?.argv;

View File

@@ -1,5 +1,6 @@
import path from 'node:path'; import path from 'node:path';
import os from 'node:os'; import os from 'node:os';
import { resolveDefaultLogFilePath } from '../src/shared/log-files.js';
export { VIDEO_EXTENSIONS } from '../src/shared/video-extensions.js'; export { VIDEO_EXTENSIONS } from '../src/shared/video-extensions.js';
export const ROFI_THEME_FILE = 'subminer.rasi'; export const ROFI_THEME_FILE = 'subminer.rasi';
@@ -29,21 +30,28 @@ export const DEFAULT_YOUTUBE_SUBGEN_OUT_DIR = path.join(
'subminer', 'subminer',
'youtube-subs', 'youtube-subs',
); );
export function getDefaultLauncherLogFile(options?: {
platform?: NodeJS.Platform;
homeDir?: string;
appDataDir?: string;
}): string {
return resolveDefaultLogFilePath('launcher', {
platform: options?.platform ?? process.platform,
homeDir: options?.homeDir ?? os.homedir(),
appDataDir: options?.appDataDir,
});
}
export function getDefaultMpvLogFile(options?: { export function getDefaultMpvLogFile(options?: {
platform?: NodeJS.Platform; platform?: NodeJS.Platform;
homeDir?: string; homeDir?: string;
appDataDir?: string; appDataDir?: string;
}): string { }): string {
const platform = options?.platform ?? process.platform; return resolveDefaultLogFilePath('mpv', {
const homeDir = options?.homeDir ?? os.homedir(); platform: options?.platform ?? process.platform,
const baseDir = homeDir: options?.homeDir ?? os.homedir(),
platform === 'win32' appDataDir: options?.appDataDir,
? path.join( });
options?.appDataDir?.trim() || path.join(homeDir, 'AppData', 'Roaming'),
'SubMiner',
)
: path.join(homeDir, '.config', 'SubMiner');
return path.join(baseDir, 'logs', `SubMiner-${new Date().toISOString().slice(0, 10)}.log`);
} }
export const DEFAULT_MPV_LOG_FILE = getDefaultMpvLogFile(); export const DEFAULT_MPV_LOG_FILE = getDefaultMpvLogFile();
@@ -79,6 +87,7 @@ export interface Args {
recursive: boolean; recursive: boolean;
profile: string; profile: string;
startOverlay: boolean; startOverlay: boolean;
youtubeMode?: 'download' | 'generate';
whisperBin: string; whisperBin: string;
whisperModel: string; whisperModel: string;
whisperVadModel: string; whisperVadModel: string;

View File

@@ -1,6 +1,6 @@
{ {
"name": "subminer", "name": "subminer",
"version": "0.7.0", "version": "0.9.0",
"description": "All-in-one sentence mining overlay with AnkiConnect and dictionary integration", "description": "All-in-one sentence mining overlay with AnkiConnect and dictionary integration",
"packageManager": "bun@1.3.5", "packageManager": "bun@1.3.5",
"main": "dist/main-entry.js", "main": "dist/main-entry.js",
@@ -77,6 +77,12 @@
"build:win": "bun run build && electron-builder --win nsis zip --publish never", "build:win": "bun run build && electron-builder --win nsis zip --publish never",
"build:win:unsigned": "bun run build && node scripts/build-win-unsigned.mjs" "build:win:unsigned": "bun run build && node scripts/build-win-unsigned.mjs"
}, },
"overrides": {
"app-builder-lib": "26.8.2",
"electron-builder-squirrel-windows": "26.8.2",
"minimatch": "10.2.3",
"tar": "7.5.11"
},
"keywords": [ "keywords": [
"anki", "anki",
"ankiconnect", "ankiconnect",
@@ -105,7 +111,7 @@
"@types/node": "^25.3.0", "@types/node": "^25.3.0",
"@types/ws": "^8.18.1", "@types/ws": "^8.18.1",
"electron": "^37.10.3", "electron": "^37.10.3",
"electron-builder": "^26.8.1", "electron-builder": "26.8.2",
"esbuild": "^0.25.12", "esbuild": "^0.25.12",
"prettier": "^3.8.1", "prettier": "^3.8.1",
"typescript": "^5.9.3" "typescript": "^5.9.3"
@@ -159,12 +165,21 @@
"include": "build/installer.nsh" "include": "build/installer.nsh"
}, },
"files": [ "files": [
"dist/**/*", "**/*",
"stats/dist/**/*", "!src{,/**/*}",
"vendor/texthooker-ui/docs/**/*", "!launcher{,/**/*}",
"vendor/texthooker-ui/package.json", "!stats/src{,/**/*}",
"package.json", "!stats/index.html",
"scripts/get-mpv-window-macos.swift" "!docs-site{,/**/*}",
"!changes{,/**/*}",
"!backlog{,/**/*}",
"!.tmp{,/**/*}",
"!release-*{,/**/*}",
"!vendor/subminer-yomitan{,/**/*}",
"!vendor/texthooker-ui/src{,/**/*}",
"!vendor/texthooker-ui/node_modules{,/**/*}",
"!vendor/texthooker-ui/.svelte-kit{,/**/*}",
"!vendor/texthooker-ui/package-lock.json"
], ],
"extraResources": [ "extraResources": [
{ {

View File

@@ -33,6 +33,7 @@ function M.load(options_lib, default_socket_path)
auto_start = true, auto_start = true,
auto_start_visible_overlay = true, auto_start_visible_overlay = true,
auto_start_pause_until_ready = true, auto_start_pause_until_ready = true,
auto_start_pause_until_ready_timeout_seconds = 15,
osd_messages = true, osd_messages = true,
log_level = "info", log_level = "info",
aniskip_enabled = true, aniskip_enabled = true,

View File

@@ -2,9 +2,9 @@ local M = {}
local OVERLAY_START_RETRY_DELAY_SECONDS = 0.2 local OVERLAY_START_RETRY_DELAY_SECONDS = 0.2
local OVERLAY_START_MAX_ATTEMPTS = 6 local OVERLAY_START_MAX_ATTEMPTS = 6
local AUTO_PLAY_READY_TIMEOUT_SECONDS = 15
local AUTO_PLAY_READY_LOADING_OSD = "Loading subtitle tokenization..." local AUTO_PLAY_READY_LOADING_OSD = "Loading subtitle tokenization..."
local AUTO_PLAY_READY_READY_OSD = "Subtitle tokenization ready" local AUTO_PLAY_READY_READY_OSD = "Subtitle tokenization ready"
local DEFAULT_AUTO_PLAY_READY_TIMEOUT_SECONDS = 15
function M.create(ctx) function M.create(ctx)
local mp = ctx.mp local mp = ctx.mp
@@ -34,6 +34,23 @@ function M.create(ctx)
return options_helper.coerce_bool(raw_pause_until_ready, false) return options_helper.coerce_bool(raw_pause_until_ready, false)
end end
local function resolve_pause_until_ready_timeout_seconds()
local raw_timeout_seconds = opts.auto_start_pause_until_ready_timeout_seconds
if raw_timeout_seconds == nil then
raw_timeout_seconds = opts["auto-start-pause-until-ready-timeout-seconds"]
end
if type(raw_timeout_seconds) == "number" then
return raw_timeout_seconds
end
if type(raw_timeout_seconds) == "string" then
local parsed = tonumber(raw_timeout_seconds)
if parsed ~= nil then
return parsed
end
end
return DEFAULT_AUTO_PLAY_READY_TIMEOUT_SECONDS
end
local function normalize_socket_path(path) local function normalize_socket_path(path)
if type(path) ~= "string" then if type(path) ~= "string" then
return nil return nil
@@ -118,7 +135,9 @@ function M.create(ctx)
end) end)
end end
subminer_log("info", "process", "Pausing playback until SubMiner overlay/tokenization readiness signal") subminer_log("info", "process", "Pausing playback until SubMiner overlay/tokenization readiness signal")
state.auto_play_ready_timeout = mp.add_timeout(AUTO_PLAY_READY_TIMEOUT_SECONDS, function() local timeout_seconds = resolve_pause_until_ready_timeout_seconds()
if timeout_seconds and timeout_seconds > 0 then
state.auto_play_ready_timeout = mp.add_timeout(timeout_seconds, function()
if not state.auto_play_ready_gate_armed then if not state.auto_play_ready_gate_armed then
return return
end end
@@ -130,6 +149,7 @@ function M.create(ctx)
release_auto_play_ready_gate("timeout") release_auto_play_ready_gate("timeout")
end) end)
end end
end
local function notify_auto_play_ready() local function notify_auto_play_ready()
release_auto_play_ready_gate("tokenization-ready") release_auto_play_ready_gate("tokenization-ready")

View File

@@ -95,6 +95,43 @@ test('writeChangelogArtifacts ignores README, groups fragments by type, writes r
} }
}); });
test('writeChangelogArtifacts skips changelog prepend when release section already exists', async () => {
const { writeChangelogArtifacts } = await loadModule();
const workspace = createWorkspace('write-artifacts-existing-version');
const projectRoot = path.join(workspace, 'SubMiner');
const existingChangelog = [
'# Changelog',
'',
'## v0.4.1 (2026-03-07)',
'### Added',
'- Existing release bullet.',
'',
].join('\n');
fs.mkdirSync(projectRoot, { recursive: true });
fs.mkdirSync(path.join(projectRoot, 'changes'), { recursive: true });
fs.writeFileSync(path.join(projectRoot, 'CHANGELOG.md'), existingChangelog, 'utf8');
fs.writeFileSync(path.join(projectRoot, 'changes', '001.md'), ['type: added', 'area: overlay', '', '- Stale release fragment.'].join('\n'), 'utf8');
try {
const result = writeChangelogArtifacts({
cwd: projectRoot,
version: '0.4.1',
date: '2026-03-08',
});
assert.deepEqual(result.deletedFragmentPaths, [path.join(projectRoot, 'changes', '001.md')]);
assert.equal(fs.existsSync(path.join(projectRoot, 'changes', '001.md')), false);
const changelog = fs.readFileSync(path.join(projectRoot, 'CHANGELOG.md'), 'utf8');
assert.equal(changelog, existingChangelog);
const releaseNotes = fs.readFileSync(path.join(projectRoot, 'release', 'release-notes.md'), 'utf8');
assert.match(releaseNotes, /## Highlights\n### Added\n- Existing release bullet\./);
} finally {
fs.rmSync(workspace, { recursive: true, force: true });
}
});
test('verifyChangelogReadyForRelease ignores README but rejects pending fragments and missing version sections', async () => { test('verifyChangelogReadyForRelease ignores README but rejects pending fragments and missing version sections', async () => {
const { verifyChangelogReadyForRelease } = await loadModule(); const { verifyChangelogReadyForRelease } = await loadModule();
const workspace = createWorkspace('verify-release'); const workspace = createWorkspace('verify-release');

View File

@@ -341,12 +341,34 @@ export function writeChangelogArtifacts(options?: ChangelogOptions): {
const version = resolveVersion(options ?? {}); const version = resolveVersion(options ?? {});
const date = resolveDate(options?.date); const date = resolveDate(options?.date);
const fragments = readChangeFragments(cwd, options?.deps); const fragments = readChangeFragments(cwd, options?.deps);
const releaseSection = buildReleaseSection(version, date, fragments);
const existingChangelogPath = path.join(cwd, 'CHANGELOG.md'); const existingChangelogPath = path.join(cwd, 'CHANGELOG.md');
const existingChangelog = existsSync(existingChangelogPath) const existingChangelog = existsSync(existingChangelogPath)
? readFileSync(existingChangelogPath, 'utf8') ? readFileSync(existingChangelogPath, 'utf8')
: ''; : '';
const outputPaths = resolveChangelogOutputPaths({ cwd }); const outputPaths = resolveChangelogOutputPaths({ cwd });
const existingReleaseSection = extractReleaseSectionBody(existingChangelog, version);
if (existingReleaseSection !== null) {
log(`Existing section found for v${version}; skipping changelog prepend.`);
for (const fragment of fragments) {
rmSync(fragment.path);
log(`Removed ${fragment.path}`);
}
const releaseNotesPath = writeReleaseNotesFile(
cwd,
existingReleaseSection,
options?.deps,
);
log(`Generated ${releaseNotesPath}`);
return {
deletedFragmentPaths: fragments.map((fragment) => fragment.path),
outputPaths,
releaseNotesPath,
};
}
const releaseSection = buildReleaseSection(version, date, fragments);
const nextChangelog = prependReleaseSection(existingChangelog, releaseSection, version); const nextChangelog = prependReleaseSection(existingChangelog, releaseSection, version);
for (const outputPath of outputPaths) { for (const outputPath of outputPaths) {

157
scripts/patch-modernz.sh Executable file
View File

@@ -0,0 +1,157 @@
#!/bin/bash
set -euo pipefail
TARGET="${HOME}/.config/mpv/scripts/modernz.lua"
usage() {
cat <<'EOF'
Usage: patch-modernz.sh [--target /path/to/modernz.lua]
Applies the local ModernZ OSC sidebar-resize patch to an existing modernz.lua.
If the target file does not exist, the script exits without changing anything.
EOF
}
while [[ $# -gt 0 ]]; do
case "$1" in
--target)
if [[ $# -lt 2 || -z "${2:-}" || "$2" == -* ]]; then
echo "patch-modernz: --target requires a non-empty file path" >&2
usage >&2
exit 1
fi
TARGET="$2"
shift 2
;;
--help|-h)
usage
exit 0
;;
*)
echo "Unknown argument: $1" >&2
exit 1
;;
esac
done
if [[ ! -f "$TARGET" ]]; then
echo "patch-modernz: target missing, skipped: $TARGET"
exit 0
fi
if grep -q 'get_external_video_margin_ratio' "$TARGET" \
&& grep -q 'observe_cached("video-margin-ratio-right"' "$TARGET"; then
echo "patch-modernz: already patched: $TARGET"
exit 0
fi
if ! patch --forward --quiet "$TARGET" <<'PATCH'
--- a/modernz.lua
+++ b/modernz.lua
@@ -931,6 +931,26 @@ local function reset_margins()
set_margin_offset("osd-margin-y", 0)
end
+local function get_external_video_margin_ratio(prop)
+ local value = mp.get_property_number(prop, 0) or 0
+ if value < 0 then return 0 end
+ if value > 0.95 then return 0.95 end
+ return value
+end
+
+local function get_layout_horizontal_bounds()
+ local margin_l = get_external_video_margin_ratio("video-margin-ratio-left")
+ local margin_r = get_external_video_margin_ratio("video-margin-ratio-right")
+ local width_ratio = math.max(0.05, 1 - margin_l - margin_r)
+ local pos_x = osc_param.playresx * margin_l
+ local width = osc_param.playresx * width_ratio
+
+ osc_param.video_margins.l = margin_l
+ osc_param.video_margins.r = margin_r
+
+ return pos_x, width
+end
+
local function update_margins()
local use_margins = get_hidetimeout() < 0 or user_opts.dynamic_margins
local top_vis = state.wc_visible
@@ -1965,8 +1985,9 @@ layouts["modern"] = function ()
local chapter_index = user_opts.show_chapter_title and mp.get_property_number("chapter", -1) >= 0
local osc_height_offset = (no_title and user_opts.notitle_osc_h_offset or 0) + ((no_chapter or not chapter_index) and user_opts.nochapter_osc_h_offset or 0)
+ local posX, layout_width = get_layout_horizontal_bounds()
local osc_geo = {
- w = osc_param.playresx,
+ w = layout_width,
h = user_opts.osc_height - osc_height_offset
}
@@ -1974,7 +1995,6 @@ layouts["modern"] = function ()
osc_param.video_margins.b = math.max(user_opts.osc_height, user_opts.fade_alpha) / osc_param.playresy
-- origin of the controllers, left/bottom corner
- local posX = 0
local posY = osc_param.playresy
osc_param.areas = {} -- delete areas
@@ -2191,8 +2211,9 @@ layouts["modern-compact"] = function ()
((user_opts.title_mbtn_left_command == "" and user_opts.title_mbtn_right_command == "") and 25 or 0) +
(((user_opts.chapter_title_mbtn_left_command == "" and user_opts.chapter_title_mbtn_right_command == "") or not chapter_index) and 10 or 0)
+ local posX, layout_width = get_layout_horizontal_bounds()
local osc_geo = {
- w = osc_param.playresx,
+ w = layout_width,
h = 145 - osc_height_offset
}
@@ -2200,7 +2221,6 @@ layouts["modern-compact"] = function ()
osc_param.video_margins.b = math.max(osc_geo.h, user_opts.fade_alpha) / osc_param.playresy
-- origin of the controllers, left/bottom corner
- local posX = 0
local posY = osc_param.playresy
osc_param.areas = {} -- delete areas
@@ -2370,8 +2390,9 @@ layouts["modern-compact"] = function ()
end
layouts["modern-image"] = function ()
+ local posX, layout_width = get_layout_horizontal_bounds()
local osc_geo = {
- w = osc_param.playresx,
+ w = layout_width,
h = 50
}
@@ -2379,7 +2400,6 @@ layouts["modern-image"] = function ()
osc_param.video_margins.b = math.max(50, user_opts.fade_alpha) / osc_param.playresy
-- origin of the controllers, left/bottom corner
- local posX = 0
local posY = osc_param.playresy
osc_param.areas = {} -- delete areas
@@ -3718,6 +3738,14 @@ observe_cached("border", request_init_resize)
observe_cached("title-bar", request_init_resize)
observe_cached("window-maximized", request_init_resize)
observe_cached("idle-active", request_tick)
+observe_cached("video-margin-ratio-left", function ()
+ state.marginsREQ = true
+ request_init_resize()
+end)
+observe_cached("video-margin-ratio-right", function ()
+ state.marginsREQ = true
+ request_init_resize()
+end)
mp.observe_property("user-data/mpv/console/open", "bool", function(_, val)
if val and user_opts.visibility == "auto" and not user_opts.showonselect then
osc_visible(false)
PATCH
then
echo "patch-modernz: failed to apply patch to $TARGET" >&2
exit 1
fi
echo "patch-modernz: patched $TARGET"

View File

@@ -0,0 +1,76 @@
import assert from 'node:assert/strict';
import fs from 'node:fs';
import os from 'node:os';
import path from 'node:path';
import { spawnSync } from 'node:child_process';
import test from 'node:test';
function withTempDir<T>(fn: (dir: string) => T): T {
const dir = fs.mkdtempSync(path.join(os.tmpdir(), 'subminer-patch-modernz-test-'));
try {
return fn(dir);
} finally {
fs.rmSync(dir, { recursive: true, force: true });
}
}
function writeExecutable(filePath: string, contents: string): void {
fs.writeFileSync(filePath, contents, 'utf8');
fs.chmodSync(filePath, 0o755);
}
test('patch-modernz rejects a missing --target value', () => {
withTempDir((root) => {
const result = spawnSync('bash', ['scripts/patch-modernz.sh', '--target'], {
cwd: process.cwd(),
encoding: 'utf8',
env: {
...process.env,
HOME: path.join(root, 'home'),
},
});
assert.equal(result.status, 1, result.stderr || result.stdout);
assert.match(result.stderr, /--target requires a non-empty file path/);
assert.match(result.stderr, /Usage: patch-modernz\.sh/);
});
});
test('patch-modernz reports patch failures explicitly', () => {
withTempDir((root) => {
const binDir = path.join(root, 'bin');
const target = path.join(root, 'modernz.lua');
const patchLog = path.join(root, 'patch.log');
fs.mkdirSync(binDir, { recursive: true });
fs.mkdirSync(path.dirname(target), { recursive: true });
fs.writeFileSync(target, 'original', 'utf8');
writeExecutable(
path.join(binDir, 'patch'),
`#!/usr/bin/env bash
set -euo pipefail
cat > "${patchLog}"
exit 1
`,
);
const result = spawnSync(
'bash',
['scripts/patch-modernz.sh', '--target', target],
{
cwd: process.cwd(),
encoding: 'utf8',
env: {
...process.env,
HOME: path.join(root, 'home'),
PATH: `${binDir}:${process.env.PATH || ''}`,
},
},
);
assert.equal(result.status, 1, result.stderr || result.stdout);
assert.match(result.stderr, /failed to apply patch to/);
assert.equal(fs.readFileSync(patchLog, 'utf8').includes('modernz.lua'), true);
});
});

View File

@@ -58,6 +58,7 @@ import { NoteUpdateWorkflow } from './anki-integration/note-update-workflow';
import { FieldGroupingWorkflow } from './anki-integration/field-grouping-workflow'; import { FieldGroupingWorkflow } from './anki-integration/field-grouping-workflow';
import { resolveAnimatedImageLeadInSeconds } from './anki-integration/animated-image-sync'; import { resolveAnimatedImageLeadInSeconds } from './anki-integration/animated-image-sync';
import { AnkiIntegrationRuntime, normalizeAnkiIntegrationConfig } from './anki-integration/runtime'; import { AnkiIntegrationRuntime, normalizeAnkiIntegrationConfig } from './anki-integration/runtime';
import { resolveMediaGenerationInputPath } from './anki-integration/media-source';
const log = createLogger('anki').child('integration'); const log = createLogger('anki').child('integration');
@@ -597,6 +598,10 @@ export class AnkiIntegration {
this.runtime.start(); this.runtime.start();
} }
waitUntilReady(): Promise<void> {
return this.runtime.waitUntilReady();
}
stop(): void { stop(): void {
this.runtime.stop(); this.runtime.stop();
} }
@@ -647,7 +652,10 @@ export class AnkiIntegration {
return null; return null;
} }
const videoPath = mpvClient.currentVideoPath; const videoPath = await resolveMediaGenerationInputPath(mpvClient, 'audio');
if (!videoPath) {
return null;
}
let startTime = mpvClient.currentSubStart; let startTime = mpvClient.currentSubStart;
let endTime = mpvClient.currentSubEnd; let endTime = mpvClient.currentSubEnd;
@@ -672,7 +680,10 @@ export class AnkiIntegration {
return null; return null;
} }
const videoPath = this.mpvClient.currentVideoPath; const videoPath = await resolveMediaGenerationInputPath(this.mpvClient, 'video');
if (!videoPath) {
return null;
}
const timestamp = this.mpvClient.currentTimePos || 0; const timestamp = this.mpvClient.currentTimePos || 0;
if (this.config.media?.imageType === 'avif') { if (this.config.media?.imageType === 'avif') {
@@ -946,8 +957,15 @@ export class AnkiIntegration {
if (this.mpvClient && this.mpvClient.currentVideoPath) { if (this.mpvClient && this.mpvClient.currentVideoPath) {
try { try {
const timestamp = this.mpvClient.currentTimePos || 0; const timestamp = this.mpvClient.currentTimePos || 0;
const notificationIconSource = await resolveMediaGenerationInputPath(
this.mpvClient,
'video',
);
if (!notificationIconSource) {
throw new Error('No media source available for notification icon');
}
const iconBuffer = await this.mediaGenerator.generateNotificationIcon( const iconBuffer = await this.mediaGenerator.generateNotificationIcon(
this.mpvClient.currentVideoPath, notificationIconSource,
timestamp, timestamp,
); );
if (iconBuffer && iconBuffer.length > 0) { if (iconBuffer && iconBuffer.length > 0) {

View File

@@ -35,6 +35,9 @@ export class AnkiConnectProxyServer {
private pendingNoteIdSet = new Set<number>(); private pendingNoteIdSet = new Set<number>();
private inFlightNoteIds = new Set<number>(); private inFlightNoteIds = new Set<number>();
private processingQueue = false; private processingQueue = false;
private readyPromise: Promise<void> | null = null;
private resolveReady: (() => void) | null = null;
private rejectReady: ((error: Error) => void) | null = null;
constructor(private readonly deps: AnkiConnectProxyServerDeps) { constructor(private readonly deps: AnkiConnectProxyServerDeps) {
this.client = axios.create({ this.client = axios.create({
@@ -48,6 +51,13 @@ export class AnkiConnectProxyServer {
return this.server !== null; return this.server !== null;
} }
waitUntilReady(): Promise<void> {
  // No server (never started / already stopped) or a server already accepting
  // connections counts as ready immediately.
  if (!this.server || this.server.listening) {
    return Promise.resolve();
  }
  // Otherwise wait on the promise created in start(): it is resolved from the
  // 'listening' callback and rejected on server error or stop().
  return this.readyPromise ?? Promise.resolve();
}
start(options: StartProxyOptions): void { start(options: StartProxyOptions): void {
this.stop(); this.stop();
@@ -58,15 +68,26 @@ export class AnkiConnectProxyServer {
return; return;
} }
this.readyPromise = new Promise<void>((resolve, reject) => {
this.resolveReady = resolve;
this.rejectReady = reject;
});
this.server = http.createServer((req, res) => { this.server = http.createServer((req, res) => {
void this.handleRequest(req, res, options.upstreamUrl); void this.handleRequest(req, res, options.upstreamUrl);
}); });
this.server.on('error', (error) => { this.server.on('error', (error) => {
this.rejectReady?.(error as Error);
this.resolveReady = null;
this.rejectReady = null;
this.deps.logError('[anki-proxy] Server error:', (error as Error).message); this.deps.logError('[anki-proxy] Server error:', (error as Error).message);
}); });
this.server.listen(options.port, options.host, () => { this.server.listen(options.port, options.host, () => {
this.resolveReady?.();
this.resolveReady = null;
this.rejectReady = null;
this.deps.logInfo( this.deps.logInfo(
`[anki-proxy] Listening on http://${options.host}:${options.port} -> ${options.upstreamUrl}`, `[anki-proxy] Listening on http://${options.host}:${options.port} -> ${options.upstreamUrl}`,
); );
@@ -79,6 +100,10 @@ export class AnkiConnectProxyServer {
this.server = null; this.server = null;
this.deps.logInfo('[anki-proxy] Stopped'); this.deps.logInfo('[anki-proxy] Stopped');
} }
this.rejectReady?.(new Error('AnkiConnect proxy stopped before becoming ready'));
this.readyPromise = null;
this.resolveReady = null;
this.rejectReady = null;
this.pendingNoteIds = []; this.pendingNoteIds = [];
this.pendingNoteIdSet.clear(); this.pendingNoteIdSet.clear();
this.inFlightNoteIds.clear(); this.inFlightNoteIds.clear();

View File

@@ -283,3 +283,117 @@ test('CardCreationService keeps updating after recordCardsMinedCallback throws',
assert.equal(calls.notesInfo, 1); assert.equal(calls.notesInfo, 1);
assert.equal(calls.updateNoteFields, 1); assert.equal(calls.updateNoteFields, 1);
}); });
// Regression test: when mpv plays a remote (YouTube) source, media generation
// must use the demuxed stream URLs that mpv reports through the
// `stream-open-filename` property (an EDL wrapper), not the page URL.
test('CardCreationService uses stream-open-filename for remote media generation', async () => {
  // Paths handed to the media generator; asserted at the end to prove the
  // audio/video stream URLs were unwrapped from the EDL source.
  const audioPaths: string[] = [];
  const imagePaths: string[] = [];
  // EDL source as mpv reports it: audio stream first, video stream second,
  // each URL carrying a %len% length prefix and a mime query parameter.
  const edlSource = [
    'edl://!new_stream;!no_clip;!no_chapters;%70%https://audio.example/videoplayback?mime=audio%2Fwebm',
    '!new_stream;!no_clip;!no_chapters;%69%https://video.example/videoplayback?mime=video%2Fmp4',
    '!global_tags,title=test',
  ].join(';');
  const service = new CardCreationService({
    getConfig: () =>
      ({
        deck: 'Mining',
        fields: {
          sentence: 'Sentence',
          audio: 'SentenceAudio',
          image: 'Picture',
        },
        media: {
          generateAudio: true,
          generateImage: true,
          imageFormat: 'jpg',
        },
        behavior: {},
        ai: false,
      }) as AnkiConnectConfig,
    getAiConfig: () => ({}),
    getTimingTracker: () => ({}) as never,
    // Stub mpv client for a remote YouTube source. The embedded assert pins
    // that only the `stream-open-filename` property is ever requested.
    getMpvClient: () =>
      ({
        currentVideoPath: 'https://www.youtube.com/watch?v=abc123',
        currentSubText: '字幕',
        currentSubStart: 1,
        currentSubEnd: 2,
        currentTimePos: 1.5,
        currentAudioStreamIndex: 0,
        requestProperty: async (name: string) => {
          assert.equal(name, 'stream-open-filename');
          return edlSource;
        },
      }) as never,
    // Minimal AnkiConnect client stub: one note (id 42) with empty fields so
    // the service takes the "fill fields after add" path.
    client: {
      addNote: async () => 42,
      addTags: async () => undefined,
      notesInfo: async () => [
        {
          noteId: 42,
          fields: {
            Sentence: { value: '' },
            SentenceAudio: { value: '' },
            Picture: { value: '' },
          },
        },
      ],
      updateNoteFields: async () => undefined,
      storeMediaFile: async () => undefined,
      findNotes: async () => [],
      retrieveMediaFile: async () => '',
    },
    // Media generator stub that records the source path it was given.
    mediaGenerator: {
      generateAudio: async (path) => {
        audioPaths.push(path);
        return Buffer.from('audio');
      },
      generateScreenshot: async (path) => {
        imagePaths.push(path);
        return Buffer.from('image');
      },
      generateAnimatedImage: async () => null,
    },
    // Notification/progress hooks are no-ops for this test.
    showOsdNotification: () => undefined,
    showUpdateResult: () => undefined,
    showStatusNotification: () => undefined,
    showNotification: async () => undefined,
    beginUpdateProgress: () => undefined,
    endUpdateProgress: () => undefined,
    withUpdateProgress: async (_message, action) => action(),
    resolveConfiguredFieldName: (noteInfo, preferredName) => {
      if (!preferredName) return null;
      return Object.keys(noteInfo.fields).find((field) => field === preferredName) ?? null;
    },
    resolveNoteFieldName: (noteInfo, preferredName) => {
      if (!preferredName) return null;
      return Object.keys(noteInfo.fields).find((field) => field === preferredName) ?? null;
    },
    getAnimatedImageLeadInSeconds: async () => 0,
    extractFields: () => ({}),
    processSentence: (sentence) => sentence,
    setCardTypeFields: () => undefined,
    mergeFieldValue: (_existing, newValue) => newValue,
    formatMiscInfoPattern: () => '',
    getEffectiveSentenceCardConfig: () => ({
      model: 'Sentence',
      sentenceField: 'Sentence',
      audioField: 'SentenceAudio',
      lapisEnabled: false,
      kikuEnabled: false,
      kikuFieldGrouping: 'disabled',
      kikuDeleteDuplicateInAuto: false,
    }),
    getFallbackDurationSeconds: () => 10,
    appendKnownWordsFromNoteInfo: () => undefined,
    isUpdateInProgress: () => false,
    setUpdateInProgress: () => undefined,
    trackLastAddedNoteId: () => undefined,
  });
  const created = await service.createSentenceCard('テスト', 0, 1);
  assert.equal(created, true);
  // Audio generation must receive the audio stream URL, screenshot generation
  // the video stream URL — never the YouTube page URL.
  assert.deepEqual(audioPaths, ['https://audio.example/videoplayback?mime=audio%2Fwebm']);
  assert.deepEqual(imagePaths, ['https://video.example/videoplayback?mime=video%2Fmp4']);
});

View File

@@ -8,6 +8,7 @@ import { createLogger } from '../logger';
import { SubtitleTimingTracker } from '../subtitle-timing-tracker'; import { SubtitleTimingTracker } from '../subtitle-timing-tracker';
import { MpvClient } from '../types'; import { MpvClient } from '../types';
import { resolveSentenceBackText } from './ai'; import { resolveSentenceBackText } from './ai';
import { resolveMediaGenerationInputPath } from './media-source';
const log = createLogger('anki').child('integration.card-creation'); const log = createLogger('anki').child('integration.card-creation');
@@ -501,7 +502,12 @@ export class CardCreationService {
this.deps.showOsdNotification('Creating sentence card...'); this.deps.showOsdNotification('Creating sentence card...');
try { try {
return await this.deps.withUpdateProgress('Creating sentence card', async () => { return await this.deps.withUpdateProgress('Creating sentence card', async () => {
const videoPath = mpvClient.currentVideoPath; const videoPath = await resolveMediaGenerationInputPath(mpvClient, 'video');
const audioSourcePath = await resolveMediaGenerationInputPath(mpvClient, 'audio');
if (!videoPath) {
this.deps.showOsdNotification('No video loaded');
return false;
}
const fields: Record<string, string> = {}; const fields: Record<string, string> = {};
const errors: string[] = []; const errors: string[] = [];
let miscInfoFilename: string | null = null; let miscInfoFilename: string | null = null;
@@ -605,7 +611,9 @@ export class CardCreationService {
try { try {
const audioFilename = this.generateAudioFilename(); const audioFilename = this.generateAudioFilename();
const audioBuffer = await this.mediaGenerateAudio(videoPath, startTime, endTime); const audioBuffer = audioSourcePath
? await this.mediaGenerateAudio(audioSourcePath, startTime, endTime)
: null;
if (audioBuffer) { if (audioBuffer) {
await this.deps.client.storeMediaFile(audioFilename, audioBuffer); await this.deps.client.storeMediaFile(audioFilename, audioBuffer);

View File

@@ -0,0 +1,64 @@
import assert from 'node:assert/strict';
import test from 'node:test';
import { resolveMediaGenerationInputPath } from './media-source';
test('resolveMediaGenerationInputPath keeps local file paths', async () => {
  // A plain filesystem path is not remote, so it must pass through untouched.
  const localPath = await resolveMediaGenerationInputPath({
    currentVideoPath: '/tmp/video.mkv',
  });
  assert.equal(localPath, '/tmp/video.mkv');
});
test('resolveMediaGenerationInputPath prefers stream-open-filename for remote media', async () => {
  // Record every mpv property the resolver requests.
  const requestedProperties: string[] = [];
  const resolved = await resolveMediaGenerationInputPath({
    currentVideoPath: 'https://www.youtube.com/watch?v=abc123',
    requestProperty: async (name: string) => {
      requestedProperties.push(name);
      return 'https://rr1---sn.example.googlevideo.com/videoplayback?id=123';
    },
  });
  // The resolved stream URL wins over the page URL, and only one property
  // lookup is made.
  assert.equal(resolved, 'https://rr1---sn.example.googlevideo.com/videoplayback?id=123');
  assert.deepEqual(requestedProperties, ['stream-open-filename']);
});
test('resolveMediaGenerationInputPath unwraps mpv edl source for audio and video', async () => {
  // EDL wrapper with a mime-tagged audio stream and a mime-tagged video stream.
  const edlSource = [
    'edl://!new_stream;!no_clip;!no_chapters;%70%https://audio.example/videoplayback?mime=audio%2Fwebm',
    '!new_stream;!no_clip;!no_chapters;%69%https://video.example/videoplayback?mime=video%2Fmp4',
    '!global_tags,title=test',
  ].join(';');
  const makeClient = () => ({
    currentVideoPath: 'https://www.youtube.com/watch?v=abc123',
    requestProperty: async () => edlSource,
  });
  const audioResult = await resolveMediaGenerationInputPath(makeClient(), 'audio');
  const videoResult = await resolveMediaGenerationInputPath(makeClient(), 'video');
  // Each kind must resolve to the matching typed stream URL.
  assert.equal(audioResult, 'https://audio.example/videoplayback?mime=audio%2Fwebm');
  assert.equal(videoResult, 'https://video.example/videoplayback?mime=video%2Fmp4');
});
test('resolveMediaGenerationInputPath falls back to currentVideoPath when stream-open-filename fails', async () => {
  // A throwing property request must not bubble up; the original remote URL
  // is reused instead.
  const fallback = await resolveMediaGenerationInputPath({
    currentVideoPath: 'https://www.youtube.com/watch?v=abc123',
    requestProperty: async () => {
      throw new Error('property unavailable');
    },
  });
  assert.equal(fallback, 'https://www.youtube.com/watch?v=abc123');
});

View File

@@ -0,0 +1,84 @@
import { isRemoteMediaPath } from '../jimaku/utils';
import type { MpvClient } from '../types';
export type MediaGenerationKind = 'audio' | 'video';
// Normalize an arbitrary value to a trimmed, non-empty string; anything else
// (non-strings, empty or whitespace-only strings) becomes null.
function trimToNonEmptyString(value: unknown): string | null {
  if (typeof value === 'string') {
    const text = value.trim();
    if (text.length > 0) {
      return text;
    }
  }
  return null;
}
// Pull every percent-length-prefixed http(s) URL out of an mpv EDL source
// string. A URL run ends at the next EDL directive (;!new_stream or
// ;!global_tags) or at end of line/input.
function extractUrlsFromMpvEdlSource(source: string): string[] {
  const urlPattern = /%\d+%(https?:\/\/.*?)(?=;!new_stream|;!global_tags|$)/gms;
  const urls: string[] = [];
  for (const match of source.matchAll(urlPattern)) {
    const url = trimToNonEmptyString(match[1]);
    if (url !== null) {
      urls.push(url);
    }
  }
  return urls;
}
// Decide whether a stream URL carries audio or video based on its `mime`
// query parameter; returns null for malformed URLs or URLs without a usable
// mime hint so the caller can fall back to stream order.
function classifyMediaUrl(url: string): MediaGenerationKind | null {
  let mime: string;
  try {
    mime = new URL(url).searchParams.get('mime')?.toLowerCase() ?? '';
  } catch {
    // Malformed URL — cannot classify.
    return null;
  }
  if (mime.startsWith('audio/')) {
    return 'audio';
  }
  return mime.startsWith('video/') ? 'video' : null;
}
// Choose the URL matching the requested media kind from an EDL source.
// When no URL can be classified via its mime parameter, fall back to mpv's
// conventional stream ordering: audio streams first, video streams last.
function resolvePreferredUrlFromMpvEdlSource(
  source: string,
  kind: MediaGenerationKind,
): string | null {
  const urls = extractUrlsFromMpvEdlSource(source);
  if (urls.length === 0) {
    return null;
  }
  for (const url of urls) {
    if (classifyMediaUrl(url) === kind) {
      return url;
    }
  }
  const positional = kind === 'audio' ? urls[0] : urls[urls.length - 1];
  return positional ?? null;
}
/**
 * Resolve the concrete media path/URL that audio/screenshot generation should
 * read from, for either the 'audio' or 'video' stream.
 *
 * Local files pass through unchanged. For remote sources (e.g. a YouTube page
 * URL) the mpv `stream-open-filename` property is queried so media generation
 * receives the resolved stream URL instead of the page URL; mpv EDL wrappers
 * are unwrapped to the stream matching `kind`. Any failure to query mpv falls
 * back to the original path. Returns null when no video is loaded.
 */
export async function resolveMediaGenerationInputPath(
  mpvClient: Pick<MpvClient, 'currentVideoPath' | 'requestProperty'> | null | undefined,
  kind: MediaGenerationKind = 'video',
): Promise<string | null> {
  const currentVideoPath = trimToNonEmptyString(mpvClient?.currentVideoPath);
  if (!currentVideoPath) {
    return null;
  }
  const requestProperty = mpvClient?.requestProperty?.bind(mpvClient);
  if (!isRemoteMediaPath(currentVideoPath) || !requestProperty) {
    return currentVideoPath;
  }
  try {
    const streamSource = trimToNonEmptyString(await requestProperty('stream-open-filename'));
    if (streamSource) {
      if (streamSource.startsWith('edl://')) {
        // Prefer the typed stream URL; keep the whole EDL string if no
        // individual URL could be extracted.
        return resolvePreferredUrlFromMpvEdlSource(streamSource, kind) ?? streamSource;
      }
      return streamSource;
    }
  } catch {
    // mpv could not report a resolved stream URL; use the original path.
  }
  return currentVideoPath;
}

View File

@@ -26,6 +26,7 @@ function createRuntime(
start: ({ host, port, upstreamUrl }) => start: ({ host, port, upstreamUrl }) =>
calls.push(`proxy:start:${host}:${port}:${upstreamUrl}`), calls.push(`proxy:start:${host}:${port}:${upstreamUrl}`),
stop: () => calls.push('proxy:stop'), stop: () => calls.push('proxy:stop'),
waitUntilReady: async () => undefined,
}), }),
logInfo: () => undefined, logInfo: () => undefined,
logWarn: () => undefined, logWarn: () => undefined,
@@ -80,6 +81,44 @@ test('AnkiIntegrationRuntime starts proxy transport when proxy mode is enabled',
assert.deepEqual(calls, ['known:start', 'proxy:start:127.0.0.1:9999:http://upstream:8765']); assert.deepEqual(calls, ['known:start', 'proxy:start:127.0.0.1:9999:http://upstream:8765']);
}); });
test('AnkiIntegrationRuntime waits for proxy readiness when proxy mode is enabled', async () => {
  // Ordered log of readiness events; the gate keeps the proxy "not ready"
  // until the test releases it.
  const observedOrder: string[] = [];
  let releaseReady: (() => void) | undefined;
  const readyGate = new Promise<void>((resolve) => {
    releaseReady = resolve;
  });
  const { runtime } = createRuntime(
    {
      proxy: {
        enabled: true,
        host: '127.0.0.1',
        port: 9999,
        upstreamUrl: 'http://upstream:8765',
      },
    },
    {
      proxyServerFactory: () => ({
        start: () => undefined,
        stop: () => undefined,
        waitUntilReady: async () => {
          observedOrder.push('proxy:wait-until-ready');
          await readyGate;
        },
      }),
    },
  );
  runtime.start();
  const waitPromise = runtime.waitUntilReady().then(() => {
    observedOrder.push('proxy:ready');
  });
  // The runtime must have delegated to the proxy but not resolved yet.
  assert.deepEqual(observedOrder, ['proxy:wait-until-ready']);
  releaseReady?.();
  await waitPromise;
  assert.deepEqual(observedOrder, ['proxy:wait-until-ready', 'proxy:ready']);
});
test('AnkiIntegrationRuntime switches transports and clears known words when runtime patch disables highlighting', () => { test('AnkiIntegrationRuntime switches transports and clears known words when runtime patch disables highlighting', () => {
const { runtime, calls } = createRuntime({ const { runtime, calls } = createRuntime({
knownWords: { knownWords: {

View File

@@ -9,6 +9,7 @@ import {
export interface AnkiIntegrationRuntimeProxyServer { export interface AnkiIntegrationRuntimeProxyServer {
start(options: { host: string; port: number; upstreamUrl: string }): void; start(options: { host: string; port: number; upstreamUrl: string }): void;
stop(): void; stop(): void;
waitUntilReady(): Promise<void>;
} }
interface AnkiIntegrationRuntimeDeps { interface AnkiIntegrationRuntimeDeps {
@@ -131,6 +132,13 @@ export class AnkiIntegrationRuntime {
return this.config; return this.config;
} }
waitUntilReady(): Promise<void> {
  // Readiness only matters while the runtime is started AND routing Anki
  // traffic through the local proxy transport; in every other mode there is
  // nothing to wait for, so resolve immediately.
  if (!this.started || !this.isProxyTransportEnabled()) {
    return Promise.resolve();
  }
  // Delegate to the (lazily created) proxy server's listen-readiness promise.
  return this.getOrCreateProxyServer().waitUntilReady();
}
start(): void { start(): void {
if (this.started) { if (this.started) {
this.stop(); this.stop();

View File

@@ -56,6 +56,15 @@ test('parseArgs captures launch-mpv targets and keeps it out of app startup', ()
assert.equal(shouldStartApp(args), false); assert.equal(shouldStartApp(args), false);
}); });
test('parseArgs captures youtube playback commands and mode', () => {
  // Both flags take a following value; together they imply an explicit
  // command that still starts the app.
  const argv = ['--youtube-play', 'https://youtube.com/watch?v=abc', '--youtube-mode', 'generate'];
  const args = parseArgs(argv);
  assert.equal(args.youtubePlay, 'https://youtube.com/watch?v=abc');
  assert.equal(args.youtubeMode, 'generate');
  assert.equal(hasExplicitCommand(args), true);
  assert.equal(shouldStartApp(args), true);
});
test('parseArgs handles jellyfin item listing controls', () => { test('parseArgs handles jellyfin item listing controls', () => {
const args = parseArgs([ const args = parseArgs([
'--jellyfin-items', '--jellyfin-items',

View File

@@ -3,6 +3,8 @@ export interface CliArgs {
start: boolean; start: boolean;
launchMpv: boolean; launchMpv: boolean;
launchMpvTargets: string[]; launchMpvTargets: string[];
youtubePlay?: string;
youtubeMode?: 'download' | 'generate';
stop: boolean; stop: boolean;
toggle: boolean; toggle: boolean;
toggleVisibleOverlay: boolean; toggleVisibleOverlay: boolean;
@@ -79,6 +81,8 @@ export function parseArgs(argv: string[]): CliArgs {
start: false, start: false,
launchMpv: false, launchMpv: false,
launchMpvTargets: [], launchMpvTargets: [],
youtubePlay: undefined,
youtubeMode: undefined,
stop: false, stop: false,
toggle: false, toggle: false,
toggleVisibleOverlay: false, toggleVisibleOverlay: false,
@@ -140,7 +144,19 @@ export function parseArgs(argv: string[]): CliArgs {
if (arg === '--background') args.background = true; if (arg === '--background') args.background = true;
else if (arg === '--start') args.start = true; else if (arg === '--start') args.start = true;
else if (arg === '--launch-mpv') { else if (arg.startsWith('--youtube-play=')) {
const value = arg.split('=', 2)[1];
if (value) args.youtubePlay = value;
} else if (arg === '--youtube-play') {
const value = readValue(argv[i + 1]);
if (value) args.youtubePlay = value;
} else if (arg.startsWith('--youtube-mode=')) {
const value = arg.split('=', 2)[1];
if (value === 'download' || value === 'generate') args.youtubeMode = value;
} else if (arg === '--youtube-mode') {
const value = readValue(argv[i + 1]);
if (value === 'download' || value === 'generate') args.youtubeMode = value;
} else if (arg === '--launch-mpv') {
args.launchMpv = true; args.launchMpv = true;
args.launchMpvTargets = argv.slice(i + 1).filter((value) => value && !value.startsWith('--')); args.launchMpvTargets = argv.slice(i + 1).filter((value) => value && !value.startsWith('--'));
break; break;
@@ -334,6 +350,7 @@ export function hasExplicitCommand(args: CliArgs): boolean {
return ( return (
args.background || args.background ||
args.start || args.start ||
Boolean(args.youtubePlay) ||
args.launchMpv || args.launchMpv ||
args.stop || args.stop ||
args.toggle || args.toggle ||
@@ -385,6 +402,7 @@ export function shouldStartApp(args: CliArgs): boolean {
if ( if (
args.background || args.background ||
args.start || args.start ||
Boolean(args.youtubePlay) ||
args.launchMpv || args.launchMpv ||
args.toggle || args.toggle ||
args.toggleVisibleOverlay || args.toggleVisibleOverlay ||
@@ -452,6 +470,7 @@ export function shouldRunSettingsOnlyStartup(args: CliArgs): boolean {
!args.jellyfinItems && !args.jellyfinItems &&
!args.jellyfinSubtitles && !args.jellyfinSubtitles &&
!args.jellyfinPlay && !args.jellyfinPlay &&
!args.youtubePlay &&
!args.jellyfinRemoteAnnounce && !args.jellyfinRemoteAnnounce &&
!args.jellyfinPreviewAuth && !args.jellyfinPreviewAuth &&
!args.texthooker && !args.texthooker &&
@@ -481,5 +500,6 @@ export function commandNeedsOverlayRuntime(args: CliArgs): boolean {
args.triggerSubsync || args.triggerSubsync ||
args.markAudioCard || args.markAudioCard ||
args.openRuntimeOptions args.openRuntimeOptions
|| Boolean(args.youtubePlay)
); );
} }

View File

@@ -13,6 +13,8 @@ ${B}Session${R}
--background Start in tray/background mode --background Start in tray/background mode
--start Connect to mpv and launch overlay --start Connect to mpv and launch overlay
--launch-mpv ${D}[targets...]${R} Launch mpv with the SubMiner mpv profile and exit --launch-mpv ${D}[targets...]${R} Launch mpv with the SubMiner mpv profile and exit
--youtube-play ${D}URL${R} Open YouTube subtitle picker flow for a URL
--youtube-mode ${D}download|generate${R} Subtitle acquisition mode for YouTube flow
--stop Stop the running instance --stop Stop the running instance
--stats Open the stats dashboard in your browser --stats Open the stats dashboard in your browser
--texthooker Start texthooker server only ${D}(no overlay)${R} --texthooker Start texthooker server only ${D}(no overlay)${R}

View File

@@ -36,7 +36,7 @@ const {
} = CORE_DEFAULT_CONFIG; } = CORE_DEFAULT_CONFIG;
const { ankiConnect, jimaku, anilist, yomitan, jellyfin, discordPresence, ai, youtubeSubgen } = const { ankiConnect, jimaku, anilist, yomitan, jellyfin, discordPresence, ai, youtubeSubgen } =
INTEGRATIONS_DEFAULT_CONFIG; INTEGRATIONS_DEFAULT_CONFIG;
const { subtitleStyle } = SUBTITLE_DEFAULT_CONFIG; const { subtitleStyle, subtitleSidebar } = SUBTITLE_DEFAULT_CONFIG;
const { immersionTracking } = IMMERSION_DEFAULT_CONFIG; const { immersionTracking } = IMMERSION_DEFAULT_CONFIG;
const { stats } = STATS_DEFAULT_CONFIG; const { stats } = STATS_DEFAULT_CONFIG;
@@ -54,6 +54,7 @@ export const DEFAULT_CONFIG: ResolvedConfig = {
subsync, subsync,
startupWarmups, startupWarmups,
subtitleStyle, subtitleStyle,
subtitleSidebar,
auto_start_overlay, auto_start_overlay,
jimaku, jimaku,
anilist, anilist,

View File

@@ -1,6 +1,6 @@
import { ResolvedConfig } from '../../types'; import { ResolvedConfig } from '../../types';
export const SUBTITLE_DEFAULT_CONFIG: Pick<ResolvedConfig, 'subtitleStyle'> = { export const SUBTITLE_DEFAULT_CONFIG: Pick<ResolvedConfig, 'subtitleStyle' | 'subtitleSidebar'> = {
subtitleStyle: { subtitleStyle: {
enableJlpt: false, enableJlpt: false,
preserveLineBreaks: false, preserveLineBreaks: false,
@@ -57,4 +57,22 @@ export const SUBTITLE_DEFAULT_CONFIG: Pick<ResolvedConfig, 'subtitleStyle'> = {
fontStyle: 'normal', fontStyle: 'normal',
}, },
}, },
subtitleSidebar: {
enabled: false,
autoOpen: false,
layout: 'overlay',
toggleKey: 'Backslash',
pauseVideoOnHover: false,
autoScroll: true,
maxWidth: 420,
opacity: 0.95,
backgroundColor: 'rgba(73, 77, 100, 0.9)',
textColor: '#cad3f5',
fontFamily: '"M PLUS 1", "Noto Sans CJK JP", sans-serif',
fontSize: 16,
timestampColor: '#a5adcb',
activeLineColor: '#f5bde6',
activeLineBackgroundColor: 'rgba(138, 173, 244, 0.22)',
hoverLineBackgroundColor: 'rgba(54, 58, 79, 0.84)',
},
}; };

View File

@@ -110,5 +110,102 @@ export function buildSubtitleConfigOptionRegistry(
description: description:
'Five colors used for rank bands when mode is `banded` (from most common to least within topX).', 'Five colors used for rank bands when mode is `banded` (from most common to least within topX).',
}, },
{
path: 'subtitleSidebar.enabled',
kind: 'boolean',
defaultValue: defaultConfig.subtitleSidebar.enabled,
description: 'Enable the subtitle sidebar feature for parsed subtitle sources.',
},
{
path: 'subtitleSidebar.autoOpen',
kind: 'boolean',
defaultValue: defaultConfig.subtitleSidebar.autoOpen,
description: 'Automatically open the subtitle sidebar once during overlay startup.',
},
{
path: 'subtitleSidebar.layout',
kind: 'enum',
enumValues: ['overlay', 'embedded'],
defaultValue: defaultConfig.subtitleSidebar.layout,
description: 'Render the subtitle sidebar as a floating overlay or reserve space inside mpv.',
},
{
path: 'subtitleSidebar.toggleKey',
kind: 'string',
defaultValue: defaultConfig.subtitleSidebar.toggleKey,
description: 'KeyboardEvent.code used to toggle the subtitle sidebar open and closed.',
},
{
path: 'subtitleSidebar.pauseVideoOnHover',
kind: 'boolean',
defaultValue: defaultConfig.subtitleSidebar.pauseVideoOnHover,
description: 'Pause mpv while hovering the subtitle sidebar, then resume on leave.',
},
{
path: 'subtitleSidebar.autoScroll',
kind: 'boolean',
defaultValue: defaultConfig.subtitleSidebar.autoScroll,
description: 'Auto-scroll the active subtitle cue into view while playback advances.',
},
{
path: 'subtitleSidebar.maxWidth',
kind: 'number',
defaultValue: defaultConfig.subtitleSidebar.maxWidth,
description: 'Maximum sidebar width in CSS pixels.',
},
{
path: 'subtitleSidebar.opacity',
kind: 'number',
defaultValue: defaultConfig.subtitleSidebar.opacity,
description: 'Base opacity applied to the sidebar shell.',
},
{
path: 'subtitleSidebar.backgroundColor',
kind: 'string',
defaultValue: defaultConfig.subtitleSidebar.backgroundColor,
description: 'Background color for the subtitle sidebar shell.',
},
{
path: 'subtitleSidebar.textColor',
kind: 'string',
defaultValue: defaultConfig.subtitleSidebar.textColor,
description: 'Default cue text color in the subtitle sidebar.',
},
{
path: 'subtitleSidebar.fontFamily',
kind: 'string',
defaultValue: defaultConfig.subtitleSidebar.fontFamily,
description: 'Font family used for subtitle sidebar cue text.',
},
{
path: 'subtitleSidebar.fontSize',
kind: 'number',
defaultValue: defaultConfig.subtitleSidebar.fontSize,
description: 'Base font size for subtitle sidebar cue text in CSS pixels.',
},
{
path: 'subtitleSidebar.timestampColor',
kind: 'string',
defaultValue: defaultConfig.subtitleSidebar.timestampColor,
description: 'Timestamp color in the subtitle sidebar.',
},
{
path: 'subtitleSidebar.activeLineColor',
kind: 'string',
defaultValue: defaultConfig.subtitleSidebar.activeLineColor,
description: 'Text color for the active subtitle cue.',
},
{
path: 'subtitleSidebar.activeLineBackgroundColor',
kind: 'string',
defaultValue: defaultConfig.subtitleSidebar.activeLineBackgroundColor,
description: 'Background color for the active subtitle cue.',
},
{
path: 'subtitleSidebar.hoverLineBackgroundColor',
kind: 'string',
defaultValue: defaultConfig.subtitleSidebar.hoverLineBackgroundColor,
description: 'Background color for hovered subtitle cues.',
},
]; ];
} }

View File

@@ -98,6 +98,12 @@ const SUBTITLE_TEMPLATE_SECTIONS: ConfigTemplateSection[] = [
notes: ['Hot-reload: subtitle style changes apply live without restarting SubMiner.'], notes: ['Hot-reload: subtitle style changes apply live without restarting SubMiner.'],
key: 'subtitleStyle', key: 'subtitleStyle',
}, },
{
title: 'Subtitle Sidebar',
description: ['Parsed-subtitle sidebar cue list styling, behavior, and toggle key.'],
notes: ['Hot-reload: subtitle sidebar changes apply live without restarting SubMiner.'],
key: 'subtitleSidebar',
},
]; ];
const INTEGRATION_TEMPLATE_SECTIONS: ConfigTemplateSection[] = [ const INTEGRATION_TEMPLATE_SECTIONS: ConfigTemplateSection[] = [

View File

@@ -15,6 +15,22 @@ export function asBoolean(value: unknown): boolean | undefined {
} }
const hexColorPattern = /^#(?:[0-9a-fA-F]{3}|[0-9a-fA-F]{4}|[0-9a-fA-F]{6}|[0-9a-fA-F]{8})$/; const hexColorPattern = /^#(?:[0-9a-fA-F]{3}|[0-9a-fA-F]{4}|[0-9a-fA-F]{6}|[0-9a-fA-F]{8})$/;
const cssColorKeywords = new Set([
'transparent',
'currentcolor',
'inherit',
'initial',
'unset',
'revert',
'revert-layer',
]);
const cssColorFunctionPattern = /^(?:rgba?|hsla?)\(\s*[^()]+?\s*\)$/i;
function supportsCssColor(text: string): boolean {
const css = (globalThis as { CSS?: { supports?: (property: string, value: string) => boolean } })
.CSS;
return css?.supports?.('color', text) ?? false;
}
export function asColor(value: unknown): string | undefined { export function asColor(value: unknown): string | undefined {
if (typeof value !== 'string') return undefined; if (typeof value !== 'string') return undefined;
@@ -22,6 +38,30 @@ export function asColor(value: unknown): string | undefined {
return hexColorPattern.test(text) ? text : undefined; return hexColorPattern.test(text) ? text : undefined;
} }
export function asCssColor(value: unknown): string | undefined {
if (typeof value !== 'string') return undefined;
const text = value.trim();
if (text.length === 0) {
return undefined;
}
if (supportsCssColor(text)) {
return text;
}
const normalized = text.toLowerCase();
if (
hexColorPattern.test(text) ||
cssColorKeywords.has(normalized) ||
cssColorFunctionPattern.test(text)
) {
return text;
}
return undefined;
}
export function asFrequencyBandedColors( export function asFrequencyBandedColors(
value: unknown, value: unknown,
): [string, string, string, string, string] | undefined { ): [string, string, string, string, string] | undefined {

View File

@@ -3,6 +3,7 @@ import { ResolveContext } from './context';
import { import {
asBoolean, asBoolean,
asColor, asColor,
asCssColor,
asFrequencyBandedColors, asFrequencyBandedColors,
asNumber, asNumber,
asString, asString,
@@ -418,4 +419,180 @@ export function applySubtitleDomainConfig(context: ResolveContext): void {
); );
} }
} }
if (isObject(src.subtitleSidebar)) {
const fallback = { ...resolved.subtitleSidebar };
resolved.subtitleSidebar = {
...resolved.subtitleSidebar,
...(src.subtitleSidebar as ResolvedConfig['subtitleSidebar']),
};
const enabled = asBoolean((src.subtitleSidebar as { enabled?: unknown }).enabled);
if (enabled !== undefined) {
resolved.subtitleSidebar.enabled = enabled;
} else if ((src.subtitleSidebar as { enabled?: unknown }).enabled !== undefined) {
resolved.subtitleSidebar.enabled = fallback.enabled;
warn(
'subtitleSidebar.enabled',
(src.subtitleSidebar as { enabled?: unknown }).enabled,
resolved.subtitleSidebar.enabled,
'Expected boolean.',
);
}
const autoOpen = asBoolean((src.subtitleSidebar as { autoOpen?: unknown }).autoOpen);
if (autoOpen !== undefined) {
resolved.subtitleSidebar.autoOpen = autoOpen;
} else if ((src.subtitleSidebar as { autoOpen?: unknown }).autoOpen !== undefined) {
resolved.subtitleSidebar.autoOpen = fallback.autoOpen;
warn(
'subtitleSidebar.autoOpen',
(src.subtitleSidebar as { autoOpen?: unknown }).autoOpen,
resolved.subtitleSidebar.autoOpen,
'Expected boolean.',
);
}
const layout = asString((src.subtitleSidebar as { layout?: unknown }).layout);
if (layout === 'overlay' || layout === 'embedded') {
resolved.subtitleSidebar.layout = layout;
} else if ((src.subtitleSidebar as { layout?: unknown }).layout !== undefined) {
resolved.subtitleSidebar.layout = fallback.layout;
warn(
'subtitleSidebar.layout',
(src.subtitleSidebar as { layout?: unknown }).layout,
resolved.subtitleSidebar.layout,
'Expected "overlay" or "embedded".',
);
}
const pauseVideoOnHover = asBoolean(
(src.subtitleSidebar as { pauseVideoOnHover?: unknown }).pauseVideoOnHover,
);
if (pauseVideoOnHover !== undefined) {
resolved.subtitleSidebar.pauseVideoOnHover = pauseVideoOnHover;
} else if ((src.subtitleSidebar as { pauseVideoOnHover?: unknown }).pauseVideoOnHover !== undefined) {
resolved.subtitleSidebar.pauseVideoOnHover = fallback.pauseVideoOnHover;
warn(
'subtitleSidebar.pauseVideoOnHover',
(src.subtitleSidebar as { pauseVideoOnHover?: unknown }).pauseVideoOnHover,
resolved.subtitleSidebar.pauseVideoOnHover,
'Expected boolean.',
);
}
const autoScroll = asBoolean((src.subtitleSidebar as { autoScroll?: unknown }).autoScroll);
if (autoScroll !== undefined) {
resolved.subtitleSidebar.autoScroll = autoScroll;
} else if ((src.subtitleSidebar as { autoScroll?: unknown }).autoScroll !== undefined) {
resolved.subtitleSidebar.autoScroll = fallback.autoScroll;
warn(
'subtitleSidebar.autoScroll',
(src.subtitleSidebar as { autoScroll?: unknown }).autoScroll,
resolved.subtitleSidebar.autoScroll,
'Expected boolean.',
);
}
const toggleKey = asString((src.subtitleSidebar as { toggleKey?: unknown }).toggleKey);
if (toggleKey !== undefined) {
resolved.subtitleSidebar.toggleKey = toggleKey;
} else if ((src.subtitleSidebar as { toggleKey?: unknown }).toggleKey !== undefined) {
resolved.subtitleSidebar.toggleKey = fallback.toggleKey;
warn(
'subtitleSidebar.toggleKey',
(src.subtitleSidebar as { toggleKey?: unknown }).toggleKey,
resolved.subtitleSidebar.toggleKey,
'Expected string.',
);
}
const maxWidth = asNumber((src.subtitleSidebar as { maxWidth?: unknown }).maxWidth);
if (maxWidth !== undefined && maxWidth > 0) {
resolved.subtitleSidebar.maxWidth = Math.floor(maxWidth);
} else if ((src.subtitleSidebar as { maxWidth?: unknown }).maxWidth !== undefined) {
resolved.subtitleSidebar.maxWidth = fallback.maxWidth;
warn(
'subtitleSidebar.maxWidth',
(src.subtitleSidebar as { maxWidth?: unknown }).maxWidth,
resolved.subtitleSidebar.maxWidth,
'Expected positive number.',
);
}
const opacity = asNumber((src.subtitleSidebar as { opacity?: unknown }).opacity);
if (opacity !== undefined && opacity >= 0 && opacity <= 1) {
resolved.subtitleSidebar.opacity = opacity;
} else if ((src.subtitleSidebar as { opacity?: unknown }).opacity !== undefined) {
resolved.subtitleSidebar.opacity = fallback.opacity;
warn(
'subtitleSidebar.opacity',
(src.subtitleSidebar as { opacity?: unknown }).opacity,
resolved.subtitleSidebar.opacity,
'Expected number between 0 and 1.',
);
}
const hexColorFields = ['textColor', 'timestampColor', 'activeLineColor'] as const;
for (const field of hexColorFields) {
const value = asColor((src.subtitleSidebar as Record<string, unknown>)[field]);
if (value !== undefined) {
resolved.subtitleSidebar[field] = value;
} else if ((src.subtitleSidebar as Record<string, unknown>)[field] !== undefined) {
resolved.subtitleSidebar[field] = fallback[field];
warn(
`subtitleSidebar.${field}`,
(src.subtitleSidebar as Record<string, unknown>)[field],
resolved.subtitleSidebar[field],
'Expected hex color.',
);
}
}
const cssColorFields = [
'backgroundColor',
'activeLineBackgroundColor',
'hoverLineBackgroundColor',
] as const;
for (const field of cssColorFields) {
const value = asCssColor((src.subtitleSidebar as Record<string, unknown>)[field]);
if (value !== undefined) {
resolved.subtitleSidebar[field] = value;
} else if ((src.subtitleSidebar as Record<string, unknown>)[field] !== undefined) {
resolved.subtitleSidebar[field] = fallback[field];
warn(
`subtitleSidebar.${field}`,
(src.subtitleSidebar as Record<string, unknown>)[field],
resolved.subtitleSidebar[field],
'Expected valid CSS color.',
);
}
}
const fontFamily = asString((src.subtitleSidebar as { fontFamily?: unknown }).fontFamily);
if (fontFamily !== undefined && fontFamily.trim().length > 0) {
resolved.subtitleSidebar.fontFamily = fontFamily.trim();
} else if ((src.subtitleSidebar as { fontFamily?: unknown }).fontFamily !== undefined) {
resolved.subtitleSidebar.fontFamily = fallback.fontFamily;
warn(
'subtitleSidebar.fontFamily',
(src.subtitleSidebar as { fontFamily?: unknown }).fontFamily,
resolved.subtitleSidebar.fontFamily,
'Expected non-empty string.',
);
}
const fontSize = asNumber((src.subtitleSidebar as { fontSize?: unknown }).fontSize);
if (fontSize !== undefined && fontSize > 0) {
resolved.subtitleSidebar.fontSize = fontSize;
} else if ((src.subtitleSidebar as { fontSize?: unknown }).fontSize !== undefined) {
resolved.subtitleSidebar.fontSize = fallback.fontSize;
warn(
'subtitleSidebar.fontSize',
(src.subtitleSidebar as { fontSize?: unknown }).fontSize,
resolved.subtitleSidebar.fontSize,
'Expected positive number.',
);
}
}
} }

View File

@@ -0,0 +1,93 @@
import test from 'node:test';
import assert from 'node:assert/strict';
import { createResolveContext } from './context';
import { applySubtitleDomainConfig } from './subtitle-domains';
test('subtitleSidebar resolves valid values and preserves dedicated defaults', () => {
  // A fully-populated, entirely valid sidebar section: every field below must
  // survive resolution unchanged.
  const sidebarInput = {
    enabled: true,
    autoOpen: true,
    layout: 'embedded',
    toggleKey: 'KeyB',
    pauseVideoOnHover: true,
    autoScroll: false,
    maxWidth: 540,
    opacity: 0.72,
    backgroundColor: 'rgba(36, 39, 58, 0.72)',
    textColor: '#cad3f5',
    fontFamily: '"Iosevka Aile", sans-serif',
    fontSize: 17,
    timestampColor: '#a5adcb',
    activeLineColor: '#f5bde6',
    activeLineBackgroundColor: 'rgba(138, 173, 244, 0.22)',
    hoverLineBackgroundColor: 'rgba(54, 58, 79, 0.84)',
  };
  const { context } = createResolveContext({ subtitleSidebar: sidebarInput });
  applySubtitleDomainConfig(context);
  const resolvedSidebar = context.resolved.subtitleSidebar;
  // Spot-check each field category (booleans, enums, numbers, strings).
  assert.deepEqual(
    {
      enabled: resolvedSidebar.enabled,
      autoOpen: resolvedSidebar.autoOpen,
      layout: resolvedSidebar.layout,
      toggleKey: resolvedSidebar.toggleKey,
      pauseVideoOnHover: resolvedSidebar.pauseVideoOnHover,
      autoScroll: resolvedSidebar.autoScroll,
      maxWidth: resolvedSidebar.maxWidth,
      opacity: resolvedSidebar.opacity,
      fontFamily: resolvedSidebar.fontFamily,
      fontSize: resolvedSidebar.fontSize,
    },
    {
      enabled: true,
      autoOpen: true,
      layout: 'embedded',
      toggleKey: 'KeyB',
      pauseVideoOnHover: true,
      autoScroll: false,
      maxWidth: 540,
      opacity: 0.72,
      fontFamily: '"Iosevka Aile", sans-serif',
      fontSize: 17,
    },
  );
});
test('subtitleSidebar accepts zero opacity', () => {
  // Zero is a legal opacity (fully transparent): it must resolve verbatim
  // and must not be treated as invalid/falsy.
  const { context, warnings } = createResolveContext({ subtitleSidebar: { opacity: 0 } });
  applySubtitleDomainConfig(context);
  assert.equal(context.resolved.subtitleSidebar.opacity, 0);
  const opacityWarnings = warnings.filter((warning) => warning.path === 'subtitleSidebar.opacity');
  assert.equal(opacityWarnings.length, 0);
});
test('subtitleSidebar falls back and warns on invalid values', () => {
  // Every field below is deliberately invalid; resolution must substitute the
  // dedicated default for each one and record a warning per field.
  const { context, warnings } = createResolveContext({
    subtitleSidebar: {
      enabled: 'yes' as never,
      autoOpen: 'yes' as never,
      layout: 'floating' as never,
      maxWidth: -1,
      opacity: 5,
      fontSize: 0,
      textColor: 'blue',
      backgroundColor: 'not-a-color',
    },
  });
  applySubtitleDomainConfig(context);
  const resolvedSidebar = context.resolved.subtitleSidebar;
  assert.deepEqual(
    {
      enabled: resolvedSidebar.enabled,
      autoOpen: resolvedSidebar.autoOpen,
      layout: resolvedSidebar.layout,
      maxWidth: resolvedSidebar.maxWidth,
      opacity: resolvedSidebar.opacity,
      fontSize: resolvedSidebar.fontSize,
      textColor: resolvedSidebar.textColor,
      backgroundColor: resolvedSidebar.backgroundColor,
    },
    {
      enabled: false,
      autoOpen: false,
      layout: 'overlay',
      maxWidth: 420,
      opacity: 0.95,
      fontSize: 16,
      textColor: '#cad3f5',
      backgroundColor: 'rgba(73, 77, 100, 0.9)',
    },
  );
  // One warning per rejected field.
  const warnedPaths = new Set(warnings.map((warning) => warning.path));
  const expectedFields = [
    'enabled',
    'autoOpen',
    'layout',
    'maxWidth',
    'opacity',
    'fontSize',
    'textColor',
  ];
  for (const field of expectedFields) {
    assert.ok(warnedPaths.has(`subtitleSidebar.${field}`));
  }
  // CSS-color fields produce the CSS-specific message, not the hex one.
  assert.ok(
    warnings.some(
      (warning) =>
        warning.path === 'subtitleSidebar.backgroundColor' &&
        warning.message === 'Expected valid CSS color.',
    ),
  );
});

View File

@@ -9,6 +9,8 @@ function makeArgs(overrides: Partial<CliArgs> = {}): CliArgs {
start: false, start: false,
launchMpv: false, launchMpv: false,
launchMpvTargets: [], launchMpvTargets: [],
youtubePlay: undefined,
youtubeMode: undefined,
stop: false, stop: false,
toggle: false, toggle: false,
toggleVisibleOverlay: false, toggleVisibleOverlay: false,
@@ -184,6 +186,9 @@ function createDeps(overrides: Partial<CliCommandServiceDeps> = {}) {
runJellyfinCommand: async () => { runJellyfinCommand: async () => {
calls.push('runJellyfinCommand'); calls.push('runJellyfinCommand');
}, },
runYoutubePlaybackFlow: async ({ url, mode }) => {
calls.push(`runYoutubePlaybackFlow:${url}:${mode}`);
},
printHelp: () => { printHelp: () => {
calls.push('printHelp'); calls.push('printHelp');
}, },
@@ -226,6 +231,40 @@ test('handleCliCommand reconnects MPV for second-instance --start when overlay r
); );
}); });
test('handleCliCommand starts youtube playback flow on initial launch', () => {
  // Record url + mode via destructured params so call order is observable.
  const { deps, calls } = createDeps({
    runYoutubePlaybackFlow: async ({ url, mode }) => {
      calls.push(`youtube:${url}:${mode}`);
    },
  });
  const args = makeArgs({ youtubePlay: 'https://youtube.com/watch?v=abc', youtubeMode: 'generate' });
  handleCliCommand(args, 'initial', deps);
  // Overlay runtime is initialized first, then the playback flow runs with
  // the explicitly-requested mode.
  assert.deepEqual(calls, [
    'initializeOverlayRuntime',
    'youtube:https://youtube.com/watch?v=abc:generate',
  ]);
});
test('handleCliCommand defaults youtube mode to download when omitted', () => {
  const { deps, calls } = createDeps({
    runYoutubePlaybackFlow: async ({ url, mode }) => {
      calls.push(`youtube:${url}:${mode}`);
    },
  });
  // No youtubeMode supplied — the service must fall back to 'download'.
  const args = makeArgs({ youtubePlay: 'https://youtube.com/watch?v=abc' });
  handleCliCommand(args, 'initial', deps);
  assert.deepEqual(calls, [
    'initializeOverlayRuntime',
    'youtube:https://youtube.com/watch?v=abc:download',
  ]);
});
test('handleCliCommand processes --start for second-instance when overlay runtime is not initialized', () => { test('handleCliCommand processes --start for second-instance when overlay runtime is not initialized', () => {
const { deps, calls } = createDeps(); const { deps, calls } = createDeps();
const args = makeArgs({ start: true }); const args = makeArgs({ start: true });

View File

@@ -63,6 +63,11 @@ export interface CliCommandServiceDeps {
}>; }>;
runStatsCommand: (args: CliArgs, source: CliCommandSource) => Promise<void>; runStatsCommand: (args: CliArgs, source: CliCommandSource) => Promise<void>;
runJellyfinCommand: (args: CliArgs) => Promise<void>; runJellyfinCommand: (args: CliArgs) => Promise<void>;
runYoutubePlaybackFlow: (request: {
url: string;
mode: NonNullable<CliArgs['youtubeMode']>;
source: CliCommandSource;
}) => Promise<void>;
printHelp: () => void; printHelp: () => void;
hasMainWindow: () => boolean; hasMainWindow: () => boolean;
getMultiCopyTimeoutMs: () => number; getMultiCopyTimeoutMs: () => number;
@@ -135,6 +140,7 @@ interface AnilistCliRuntime {
interface AppCliRuntime { interface AppCliRuntime {
stop: () => void; stop: () => void;
hasMainWindow: () => boolean; hasMainWindow: () => boolean;
runYoutubePlaybackFlow: CliCommandServiceDeps['runYoutubePlaybackFlow'];
} }
export interface CliCommandDepsRuntimeOptions { export interface CliCommandDepsRuntimeOptions {
@@ -226,6 +232,7 @@ export function createCliCommandDepsRuntime(
generateCharacterDictionary: options.dictionary.generate, generateCharacterDictionary: options.dictionary.generate,
runStatsCommand: options.jellyfin.runStatsCommand, runStatsCommand: options.jellyfin.runStatsCommand,
runJellyfinCommand: options.jellyfin.runCommand, runJellyfinCommand: options.jellyfin.runCommand,
runYoutubePlaybackFlow: options.app.runYoutubePlaybackFlow,
printHelp: options.ui.printHelp, printHelp: options.ui.printHelp,
hasMainWindow: options.app.hasMainWindow, hasMainWindow: options.app.hasMainWindow,
getMultiCopyTimeoutMs: options.getMultiCopyTimeoutMs, getMultiCopyTimeoutMs: options.getMultiCopyTimeoutMs,
@@ -396,6 +403,19 @@ export function handleCliCommand(
} else if (args.jellyfin) { } else if (args.jellyfin) {
deps.openJellyfinSetup(); deps.openJellyfinSetup();
deps.log('Opened Jellyfin setup flow.'); deps.log('Opened Jellyfin setup flow.');
} else if (args.youtubePlay) {
const youtubeUrl = args.youtubePlay;
runAsyncWithOsd(
() =>
deps.runYoutubePlaybackFlow({
url: youtubeUrl,
mode: args.youtubeMode ?? 'download',
source,
}),
deps,
'runYoutubePlaybackFlow',
'YouTube playback failed',
);
} else if (args.dictionary) { } else if (args.dictionary) {
const shouldStopAfterRun = source === 'initial' && !deps.hasMainWindow(); const shouldStopAfterRun = source === 'initial' && !deps.hasMainWindow();
deps.log('Generating character dictionary for current anime...'); deps.log('Generating character dictionary for current anime...');

View File

@@ -42,6 +42,9 @@ function classifyDiff(prev: ResolvedConfig, next: ResolvedConfig): ConfigHotRelo
if (!isEqual(prev.shortcuts, next.shortcuts)) { if (!isEqual(prev.shortcuts, next.shortcuts)) {
hotReloadFields.push('shortcuts'); hotReloadFields.push('shortcuts');
} }
if (!isEqual(prev.subtitleSidebar, next.subtitleSidebar)) {
hotReloadFields.push('subtitleSidebar');
}
if (prev.secondarySub.defaultMode !== next.secondarySub.defaultMode) { if (prev.secondarySub.defaultMode !== next.secondarySub.defaultMode) {
hotReloadFields.push('secondarySub.defaultMode'); hotReloadFields.push('secondarySub.defaultMode');
} }
@@ -55,7 +58,7 @@ function classifyDiff(prev: ResolvedConfig, next: ResolvedConfig): ConfigHotRelo
]); ]);
for (const key of keys) { for (const key of keys) {
if (key === 'subtitleStyle' || key === 'keybindings' || key === 'shortcuts') { if (key === 'subtitleStyle' || key === 'keybindings' || key === 'shortcuts' || key === 'subtitleSidebar') {
continue; continue;
} }

View File

@@ -37,6 +37,21 @@ async function waitForPendingAnimeMetadata(tracker: ImmersionTrackerService): Pr
await privateApi.pendingAnimeMetadataUpdates?.get(videoId); await privateApi.pendingAnimeMetadataUpdates?.get(videoId);
} }
/**
 * Polls `predicate` every `intervalMs` until it returns true or `timeoutMs`
 * elapses. On timeout, one final check is made through `assert` so a failure
 * surfaces as an assertion error instead of a silent return.
 */
async function waitForCondition(
  predicate: () => boolean,
  timeoutMs = 1_000,
  intervalMs = 10,
): Promise<void> {
  const giveUpAt = Date.now() + timeoutMs;
  let satisfied = predicate();
  while (!satisfied && Date.now() < giveUpAt) {
    await new Promise<void>((resolve) => setTimeout(resolve, intervalMs));
    satisfied = predicate();
  }
  if (!satisfied) {
    // Final authoritative check: throws AssertionError when still false.
    assert.equal(predicate(), true);
  }
}
function makeMergedToken(overrides: Partial<MergedToken>): MergedToken { function makeMergedToken(overrides: Partial<MergedToken>): MergedToken {
return { return {
surface: '', surface: '',
@@ -2297,6 +2312,132 @@ test('reassignAnimeAnilist preserves existing description when description is om
} }
}); });
test('handleMediaChange stores youtube metadata for new youtube sessions', async () => {
  const dbPath = makeDbPath();
  let tracker: ImmersionTrackerService | null = null;
  // Snapshot globals mutated below so the finally block can restore them.
  const originalFetch = globalThis.fetch;
  const originalPath = process.env.PATH;
  let fakeBinDir: string | null = null;
  try {
    // Stub yt-dlp with a shim that prints canned metadata JSON, and prepend
    // its directory to PATH so the tracker resolves the shim first.
    fakeBinDir = fs.mkdtempSync(path.join(os.tmpdir(), 'subminer-yt-dlp-bin-'));
    const ytDlpOutput =
      '{"id":"abc123","title":"Video Name","webpage_url":"https://www.youtube.com/watch?v=abc123","thumbnail":"https://i.ytimg.com/vi/abc123/hqdefault.jpg","channel_id":"UCcreator123","channel":"Creator Name","channel_url":"https://www.youtube.com/channel/UCcreator123","uploader_id":"@creator","uploader_url":"https://www.youtube.com/@creator","description":"Video description","channel_follower_count":12345,"thumbnails":[{"url":"https://i.ytimg.com/vi/abc123/hqdefault.jpg"},{"url":"https://yt3.googleusercontent.com/channel-avatar=s88"}]}';
    if (process.platform === 'win32') {
      // cmd shim: batch file types out a sibling JSON payload.
      const outputPath = path.join(fakeBinDir, 'output.json');
      fs.writeFileSync(outputPath, ytDlpOutput, 'utf8');
      fs.writeFileSync(
        path.join(fakeBinDir, 'yt-dlp.cmd'),
        '@echo off\r\ntype "%~dp0output.json"\r\n',
        'utf8',
      );
    } else {
      // POSIX shim: executable shell script printing the payload.
      const scriptPath = path.join(fakeBinDir, 'yt-dlp');
      fs.writeFileSync(
        scriptPath,
        `#!/bin/sh
printf '%s\n' '${ytDlpOutput}'
`,
        { mode: 0o755 },
      );
    }
    process.env.PATH = `${fakeBinDir}${path.delimiter}${originalPath ?? ''}`;
    // Stub fetch: serve an oEmbed thumbnail payload, and fake image bytes for
    // every other URL (cover-art downloads).
    globalThis.fetch = async (input) => {
      const url = String(input);
      if (url.includes('/oembed')) {
        return new Response(
          JSON.stringify({
            thumbnail_url: 'https://i.ytimg.com/vi/abc123/hqdefault.jpg',
          }),
          { status: 200, headers: { 'Content-Type': 'application/json' } },
        );
      }
      return new Response(new Uint8Array([1, 2, 3]), {
        status: 200,
        headers: { 'Content-Type': 'image/jpeg' },
      });
    };
    const Ctor = await loadTrackerCtor();
    tracker = new Ctor({ dbPath });
    tracker.handleMediaChange('https://www.youtube.com/watch?v=abc123', 'Player Title');
    const privateApi = tracker as unknown as { db: DatabaseSync };
    // Metadata capture is fire-and-forget; poll until the row lands.
    await waitForCondition(
      () => {
        const stored = privateApi.db
          .prepare("SELECT 1 AS ready FROM imm_youtube_videos WHERE youtube_video_id = 'abc123'")
          .get() as { ready: number } | null;
        return stored?.ready === 1;
      },
      5_000,
    );
    const row = privateApi.db
      .prepare(
        `
          SELECT
            youtube_video_id AS youtubeVideoId,
            video_url AS videoUrl,
            video_title AS videoTitle,
            video_thumbnail_url AS videoThumbnailUrl,
            channel_id AS channelId,
            channel_name AS channelName,
            channel_url AS channelUrl,
            channel_thumbnail_url AS channelThumbnailUrl,
            uploader_id AS uploaderId,
            uploader_url AS uploaderUrl,
            description AS description
          FROM imm_youtube_videos
        `,
      )
      .get() as {
      youtubeVideoId: string;
      videoUrl: string;
      videoTitle: string;
      videoThumbnailUrl: string;
      channelId: string;
      channelName: string;
      channelUrl: string;
      channelThumbnailUrl: string;
      uploaderId: string;
      uploaderUrl: string;
      description: string;
    } | null;
    const videoRow = privateApi.db
      .prepare(
        `
          SELECT canonical_title AS canonicalTitle
          FROM imm_videos
          WHERE video_id = 1
        `,
      )
      .get() as { canonicalTitle: string } | null;
    assert.ok(row);
    assert.ok(videoRow);
    // Every field from the yt-dlp payload must be persisted verbatim.
    assert.equal(row.youtubeVideoId, 'abc123');
    assert.equal(row.videoUrl, 'https://www.youtube.com/watch?v=abc123');
    assert.equal(row.videoTitle, 'Video Name');
    assert.equal(row.videoThumbnailUrl, 'https://i.ytimg.com/vi/abc123/hqdefault.jpg');
    assert.equal(row.channelId, 'UCcreator123');
    assert.equal(row.channelName, 'Creator Name');
    assert.equal(row.channelUrl, 'https://www.youtube.com/channel/UCcreator123');
    assert.equal(row.channelThumbnailUrl, 'https://yt3.googleusercontent.com/channel-avatar=s88');
    assert.equal(row.uploaderId, '@creator');
    assert.equal(row.uploaderUrl, 'https://www.youtube.com/@creator');
    assert.equal(row.description, 'Video description');
    // The canonical title is upgraded from the player title to yt-dlp's title.
    assert.equal(videoRow.canonicalTitle, 'Video Name');
  } finally {
    // Assigning `undefined` to an env var stores the string "undefined";
    // delete the entry instead when PATH was originally unset.
    if (originalPath === undefined) {
      delete process.env.PATH;
    } else {
      process.env.PATH = originalPath;
    }
    globalThis.fetch = originalFetch;
    tracker?.destroy();
    cleanupDbPath(dbPath);
    if (fakeBinDir) {
      fs.rmSync(fakeBinDir, { recursive: true, force: true });
    }
  }
});
test('reassignAnimeAnilist clears description when description is explicitly null', async () => { test('reassignAnimeAnilist clears description when description is explicitly null', async () => {
const dbPath = makeDbPath(); const dbPath = makeDbPath();
let tracker: ImmersionTrackerService | null = null; let tracker: ImmersionTrackerService | null = null;

View File

@@ -1,6 +1,7 @@
import path from 'node:path'; import path from 'node:path';
import * as fs from 'node:fs'; import * as fs from 'node:fs';
import { createLogger } from '../../logger'; import { createLogger } from '../../logger';
import { MediaGenerator } from '../../media-generator';
import type { CoverArtFetcher } from './anilist/cover-art-fetcher'; import type { CoverArtFetcher } from './anilist/cover-art-fetcher';
import { getLocalVideoMetadata, guessAnimeVideoMetadata } from './immersion-tracker/metadata'; import { getLocalVideoMetadata, guessAnimeVideoMetadata } from './immersion-tracker/metadata';
import { import {
@@ -22,6 +23,7 @@ import {
type TrackerPreparedStatements, type TrackerPreparedStatements,
updateVideoMetadataRecord, updateVideoMetadataRecord,
updateVideoTitleRecord, updateVideoTitleRecord,
upsertYoutubeVideoMetadata,
} from './immersion-tracker/storage'; } from './immersion-tracker/storage';
import { import {
applySessionLifetimeSummary, applySessionLifetimeSummary,
@@ -153,6 +155,104 @@ import {
import type { MergedToken } from '../../types'; import type { MergedToken } from '../../types';
import { shouldExcludeTokenFromVocabularyPersistence } from './tokenizer/annotation-stage'; import { shouldExcludeTokenFromVocabularyPersistence } from './tokenizer/annotation-stage';
import { deriveStoredPartOfSpeech } from './tokenizer/part-of-speech'; import { deriveStoredPartOfSpeech } from './tokenizer/part-of-speech';
import { probeYoutubeVideoMetadata } from './youtube/metadata-probe';
// After a cached "no cover found" result, wait this long before re-probing.
const YOUTUBE_COVER_RETRY_MS = 5 * 60 * 1000;
// Cap how far into the video a fallback screenshot may seek.
const YOUTUBE_SCREENSHOT_MAX_SECONDS = 120;
const YOUTUBE_OEMBED_ENDPOINT = 'https://www.youtube.com/oembed';
// URL-safe token of at least 6 characters, matching YouTube id alphabet.
const YOUTUBE_ID_PATTERN = /^[A-Za-z0-9_-]{6,}$/;

/** Returns true when `value` is a non-empty string shaped like a YouTube video id. */
function isValidYouTubeVideoId(value: string | null): boolean {
  if (value === null || value === '') {
    return false;
  }
  return YOUTUBE_ID_PATTERN.test(value);
}
/**
 * Extracts the YouTube video id from a watch/short/embed/live URL.
 *
 * Returns null for unparsable URLs, non-YouTube hosts, or URLs that do not
 * carry a plausibly-shaped id. Hosts are matched exactly or as a true
 * subdomain (`youtu.be`, `youtube.com`, `youtube-nocookie.com` and their
 * `*.`-prefixed forms), so lookalike domains such as `notyoutube.com` are
 * rejected.
 */
function extractYouTubeVideoId(mediaUrl: string): string | null {
  let parsed: URL;
  try {
    parsed = new URL(mediaUrl);
  } catch {
    return null;
  }
  const host = parsed.hostname.toLowerCase();
  // Exact-or-dot-subdomain match. A bare endsWith('youtube.com') would also
  // accept unrelated hosts like 'evil-youtube.com'.
  const matchesHost = (domain: string): boolean =>
    host === domain || host.endsWith(`.${domain}`);
  if (
    !matchesHost('youtu.be') &&
    !matchesHost('youtube.com') &&
    !matchesHost('youtube-nocookie.com')
  ) {
    return null;
  }
  if (matchesHost('youtu.be')) {
    // Short links carry the id as the first path segment: youtu.be/<id>.
    const pathId = parsed.pathname.split('/').filter(Boolean)[0] ?? null;
    return isValidYouTubeVideoId(pathId) ? pathId : null;
  }
  // Standard watch URLs: ?v=<id> (or legacy ?vi=<id>).
  const queryId = parsed.searchParams.get('v') ?? parsed.searchParams.get('vi') ?? null;
  if (isValidYouTubeVideoId(queryId)) {
    return queryId;
  }
  // Path-based forms: /shorts/<id>, /embed/<id>, /live/<id>, /v/<id>.
  const pathParts = parsed.pathname.split('/').filter(Boolean);
  for (let i = 0; i < pathParts.length; i += 1) {
    const current = pathParts[i];
    const next = pathParts[i + 1];
    if (!current || !next) continue;
    const marker = current.toLowerCase();
    if (marker === 'shorts' || marker === 'embed' || marker === 'live' || marker === 'v') {
      const candidate = decodeURIComponent(next);
      if (isValidYouTubeVideoId(candidate)) {
        return candidate;
      }
    }
  }
  return null;
}
/**
 * Candidate static-thumbnail URLs for a video id, ordered from highest to
 * lowest quality; callers try each in turn until one downloads.
 */
function buildYouTubeThumbnailUrls(videoId: string): string[] {
  const variants = ['maxresdefault', 'hqdefault', 'sddefault', 'mqdefault', '0', 'default'];
  return variants.map((variant) => `https://i.ytimg.com/vi/${videoId}/${variant}.jpg`);
}
/**
 * Asks YouTube's oEmbed endpoint for the thumbnail of `mediaUrl`.
 * Returns null on any network, HTTP, or payload problem so callers can fall
 * back to the static thumbnail URLs.
 */
async function fetchYouTubeOEmbedThumbnail(mediaUrl: string): Promise<string | null> {
  const endpoint = `${YOUTUBE_OEMBED_ENDPOINT}?url=${encodeURIComponent(mediaUrl)}&format=json`;
  try {
    const response = await fetch(endpoint);
    if (!response.ok) {
      return null;
    }
    const payload = (await response.json()) as { thumbnail_url?: unknown };
    if (typeof payload.thumbnail_url !== 'string') {
      return null;
    }
    const trimmed = payload.thumbnail_url.trim();
    return trimmed.length > 0 ? trimmed : null;
  } catch {
    return null;
  }
}
/**
 * Downloads `url` and returns the body bytes, or null when the request
 * fails, returns a non-2xx status, or advertises a non-image content type.
 * A missing content-type header is tolerated.
 */
async function downloadImage(url: string): Promise<Buffer | null> {
  try {
    const response = await fetch(url);
    if (!response.ok) {
      return null;
    }
    const contentType = response.headers.get('content-type');
    const looksLikeImage = contentType === null || contentType.toLowerCase().startsWith('image/');
    if (!looksLikeImage) {
      return null;
    }
    const bytes = await response.arrayBuffer();
    return Buffer.from(bytes);
  } catch {
    return null;
  }
}
export type { export type {
AnimeAnilistEntryRow, AnimeAnilistEntryRow,
@@ -212,9 +312,11 @@ export class ImmersionTrackerService {
private sessionState: SessionState | null = null; private sessionState: SessionState | null = null;
private currentVideoKey = ''; private currentVideoKey = '';
private currentMediaPathOrUrl = ''; private currentMediaPathOrUrl = '';
private readonly mediaGenerator = new MediaGenerator();
private readonly preparedStatements: TrackerPreparedStatements; private readonly preparedStatements: TrackerPreparedStatements;
private coverArtFetcher: CoverArtFetcher | null = null; private coverArtFetcher: CoverArtFetcher | null = null;
private readonly pendingCoverFetches = new Map<number, Promise<boolean>>(); private readonly pendingCoverFetches = new Map<number, Promise<boolean>>();
private readonly pendingYoutubeMetadataFetches = new Map<number, Promise<void>>();
private readonly recordedSubtitleKeys = new Set<string>(); private readonly recordedSubtitleKeys = new Set<string>();
private readonly pendingAnimeMetadataUpdates = new Map<number, Promise<void>>(); private readonly pendingAnimeMetadataUpdates = new Map<number, Promise<void>>();
private readonly resolveLegacyVocabularyPos: private readonly resolveLegacyVocabularyPos:
@@ -647,6 +749,17 @@ export class ImmersionTrackerService {
if (existing?.coverBlob) { if (existing?.coverBlob) {
return true; return true;
} }
const row = this.db
.prepare('SELECT source_url AS sourceUrl FROM imm_videos WHERE video_id = ?')
.get(videoId) as { sourceUrl: string | null } | null;
const sourceUrl = row?.sourceUrl?.trim() ?? '';
const youtubeVideoId = sourceUrl ? extractYouTubeVideoId(sourceUrl) : null;
if (youtubeVideoId) {
const youtubePromise = this.ensureYouTubeCoverArt(videoId, sourceUrl, youtubeVideoId);
return await youtubePromise;
}
if (!this.coverArtFetcher) { if (!this.coverArtFetcher) {
return false; return false;
} }
@@ -677,6 +790,143 @@ export class ImmersionTrackerService {
} }
} }
/**
 * Deduplicates concurrent YouTube cover-art fetches per video id: reuses an
 * in-flight capture when one exists, otherwise starts one and tracks it in
 * pendingCoverFetches until it settles.
 */
private ensureYouTubeCoverArt(videoId: number, sourceUrl: string, youtubeVideoId: string): Promise<boolean> {
  const inFlight = this.pendingCoverFetches.get(videoId);
  if (inFlight) {
    return inFlight;
  }
  const promise = this.captureYouTubeCoverArt(videoId, sourceUrl, youtubeVideoId);
  this.pendingCoverFetches.set(videoId, promise);
  // Clean up via then(onFulfilled, onRejected) rather than finally(): the
  // promise returned by finally() re-throws a rejection, and with no handler
  // attached to it that surfaces as an unhandled rejection even when callers
  // await `promise` itself.
  const cleanup = () => {
    this.pendingCoverFetches.delete(videoId);
  };
  promise.then(cleanup, cleanup);
  return promise;
}
/**
 * Resolves cover art for a YouTube-backed video row, trying sources in
 * order of quality/cost:
 *   1. the thumbnail reported by YouTube's oEmbed endpoint,
 *   2. the static i.ytimg.com thumbnail URL variants,
 *   3. a screenshot generated from the stream itself.
 * Returns true when a cover blob was stored (or already existed), false
 * otherwise. On total failure a "no match" row may be cached so retries are
 * throttled by YOUTUBE_COVER_RETRY_MS.
 */
private async captureYouTubeCoverArt(
  videoId: number,
  sourceUrl: string,
  youtubeVideoId: string,
): Promise<boolean> {
  if (this.isDestroyed) return false;
  const existing = await this.getCoverArt(videoId);
  // Bytes already cached — nothing to do.
  if (existing?.coverBlob) {
    return true;
  }
  // A previous attempt recorded an explicit "no match" row (all-null fields);
  // skip re-probing until the retry window has elapsed.
  if (
    existing?.coverUrl === null &&
    existing?.anilistId === null &&
    existing?.coverBlob === null &&
    Date.now() - existing.fetchedAtMs < YOUTUBE_COVER_RETRY_MS
  ) {
    return false;
  }
  let coverBlob: Buffer | null = null;
  let coverUrl: string | null = null;
  // Source 1: oEmbed-advertised thumbnail.
  const embedThumbnailUrl = await fetchYouTubeOEmbedThumbnail(sourceUrl);
  if (embedThumbnailUrl) {
    const embedBlob = await downloadImage(embedThumbnailUrl);
    if (embedBlob) {
      coverBlob = embedBlob;
      coverUrl = embedThumbnailUrl;
    }
  }
  // Source 2: static thumbnail variants, highest quality first.
  if (!coverBlob) {
    for (const candidate of buildYouTubeThumbnailUrls(youtubeVideoId)) {
      const candidateBlob = await downloadImage(candidate);
      if (!candidateBlob) {
        continue;
      }
      coverBlob = candidateBlob;
      coverUrl = candidate;
      break;
    }
  }
  // Source 3: grab a frame from the stream at a random early timestamp,
  // bounded by the known duration (when available) and the screenshot cap.
  if (!coverBlob) {
    const durationMs = getVideoDurationMs(this.db, videoId);
    const maxSeconds = durationMs > 0 ? Math.min(durationMs / 1000, YOUTUBE_SCREENSHOT_MAX_SECONDS) : null;
    const seekSecond = Math.random() * (maxSeconds ?? YOUTUBE_SCREENSHOT_MAX_SECONDS);
    try {
      coverBlob = await this.mediaGenerator.generateScreenshot(
        sourceUrl,
        seekSecond,
        {
          format: 'jpg',
          quality: 90,
          maxWidth: 640,
        },
      );
    } catch (error) {
      this.logger.warn(
        'cover-art: failed to generate YouTube screenshot for videoId=%d: %s',
        videoId,
        (error as Error).message,
      );
    }
  }
  if (coverBlob) {
    // Preserve any Anilist linkage/titles from a pre-existing row.
    upsertCoverArt(this.db, videoId, {
      anilistId: existing?.anilistId ?? null,
      coverUrl,
      coverBlob,
      titleRomaji: existing?.titleRomaji ?? null,
      titleEnglish: existing?.titleEnglish ?? null,
      episodesTotal: existing?.episodesTotal ?? null,
    });
    return true;
  }
  // Cache a "no match" marker only when no Anilist-sourced row would be
  // clobbered, so the retry throttle above can take effect.
  const shouldCacheNoMatch =
    !existing || (existing.coverUrl === null && existing.anilistId === null);
  if (shouldCacheNoMatch) {
    upsertCoverArt(this.db, videoId, {
      anilistId: null,
      coverUrl: null,
      coverBlob: null,
      titleRomaji: existing?.titleRomaji ?? null,
      titleEnglish: existing?.titleEnglish ?? null,
      episodesTotal: existing?.episodesTotal ?? null,
    });
  }
  return false;
}
/**
 * Fire-and-forget probe of YouTube metadata for a video row. At most one
 * probe runs per video id at a time; failures are logged at debug level and
 * never propagate to callers.
 */
private captureYoutubeMetadataAsync(videoId: number, sourceUrl: string): void {
  if (this.pendingYoutubeMetadataFetches.has(videoId)) {
    return;
  }
  const task = (async () => {
    try {
      const metadata = await probeYoutubeVideoMetadata(sourceUrl);
      if (!metadata) {
        return;
      }
      upsertYoutubeVideoMetadata(this.db, videoId, metadata);
      // Promote yt-dlp's title to the canonical title when one was returned.
      const canonicalTitle = metadata.videoTitle?.trim();
      if (canonicalTitle) {
        updateVideoTitleRecord(this.db, videoId, canonicalTitle);
      }
    } catch (error) {
      this.logger.debug(
        'youtube metadata capture skipped for videoId=%d: %s',
        videoId,
        (error as Error).message,
      );
    }
  })();
  this.pendingYoutubeMetadataFetches.set(videoId, task);
  // `task` cannot reject (the whole body is wrapped in try/catch), so the
  // finally-derived promise is safe to discard.
  void task.finally(() => {
    this.pendingYoutubeMetadataFetches.delete(videoId);
  });
}
handleMediaChange(mediaPath: string | null, mediaTitle: string | null): void { handleMediaChange(mediaPath: string | null, mediaTitle: string | null): void {
const normalizedPath = normalizeMediaPath(mediaPath); const normalizedPath = normalizeMediaPath(mediaPath);
const normalizedTitle = normalizeText(mediaTitle); const normalizedTitle = normalizeText(mediaTitle);
@@ -721,6 +971,13 @@ export class ImmersionTrackerService {
`Starting immersion session for path=${normalizedPath} videoId=${sessionInfo.videoId}`, `Starting immersion session for path=${normalizedPath} videoId=${sessionInfo.videoId}`,
); );
this.startSession(sessionInfo.videoId, sessionInfo.startedAtMs); this.startSession(sessionInfo.videoId, sessionInfo.startedAtMs);
if (sourceType === SOURCE_TYPE_REMOTE) {
const youtubeVideoId = extractYouTubeVideoId(normalizedPath);
if (youtubeVideoId) {
void this.ensureYouTubeCoverArt(sessionInfo.videoId, normalizedPath, youtubeVideoId);
this.captureYoutubeMetadataAsync(sessionInfo.videoId, normalizedPath);
}
}
this.captureAnimeMetadataAsync(sessionInfo.videoId, normalizedPath, normalizedTitle || null); this.captureAnimeMetadataAsync(sessionInfo.videoId, normalizedPath, normalizedTitle || null);
this.captureVideoMetadataAsync(sessionInfo.videoId, sourceType, normalizedPath); this.captureVideoMetadataAsync(sessionInfo.videoId, sourceType, normalizedPath);
} }

View File

@@ -39,6 +39,7 @@ import {
} from '../query.js'; } from '../query.js';
import { import {
SOURCE_TYPE_LOCAL, SOURCE_TYPE_LOCAL,
SOURCE_TYPE_REMOTE,
EVENT_CARD_MINED, EVENT_CARD_MINED,
EVENT_SUBTITLE_LINE, EVENT_SUBTITLE_LINE,
EVENT_YOMITAN_LOOKUP, EVENT_YOMITAN_LOOKUP,
@@ -1956,6 +1957,100 @@ test('media library and detail queries read lifetime totals', () => {
} }
}); });
test('media library and detail queries include joined youtube metadata when present', () => {
  const dbPath = makeDbPath();
  const db = new Database(dbPath);
  try {
    ensureSchema(db);
    // Remote (YouTube) video row; canonicalTitle here is the pre-metadata
    // fallback and should be superseded by the joined youtube title below.
    const mediaOne = getOrCreateVideoRecord(db, 'yt:https://www.youtube.com/watch?v=abc123', {
      canonicalTitle: 'Local Fallback Title',
      sourcePath: null,
      sourceUrl: 'https://www.youtube.com/watch?v=abc123',
      sourceType: SOURCE_TYPE_REMOTE,
    });
    // Lifetime row is required: getMediaLibrary INNER JOINs imm_lifetime_media.
    db.prepare(
      `
        INSERT INTO imm_lifetime_media (
          video_id,
          total_sessions,
          total_active_ms,
          total_cards,
          total_lines_seen,
          total_tokens_seen,
          completed,
          first_watched_ms,
          last_watched_ms,
          CREATED_DATE,
          LAST_UPDATE_DATE
        ) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
      `,
    ).run(mediaOne, 2, 6_000, 1, 5, 80, 0, 1_000, 9_000, 9_000, 9_000);
    // Youtube metadata row that should be LEFT JOINed into both queries.
    db.prepare(
      `
        INSERT INTO imm_youtube_videos (
          video_id,
          youtube_video_id,
          video_url,
          video_title,
          video_thumbnail_url,
          channel_id,
          channel_name,
          channel_url,
          channel_thumbnail_url,
          uploader_id,
          uploader_url,
          description,
          metadata_json,
          fetched_at_ms,
          CREATED_DATE,
          LAST_UPDATE_DATE
        ) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
      `,
    ).run(
      mediaOne,
      'abc123',
      'https://www.youtube.com/watch?v=abc123',
      'Tracked Video Title',
      'https://i.ytimg.com/vi/abc123/hqdefault.jpg',
      'UCcreator123',
      'Creator Name',
      'https://www.youtube.com/channel/UCcreator123',
      'https://yt3.googleusercontent.com/channel-avatar=s88',
      '@creator',
      'https://www.youtube.com/@creator',
      'Video description',
      '{"source":"test"}',
      10_000,
      10_000,
      10_000,
    );
    const library = getMediaLibrary(db);
    const detail = getMediaDetail(db, mediaOne);
    // Library rows surface the channel-level youtube columns.
    assert.equal(library.length, 1);
    assert.equal(library[0]?.youtubeVideoId, 'abc123');
    assert.equal(library[0]?.videoTitle, 'Tracked Video Title');
    assert.equal(library[0]?.channelId, 'UCcreator123');
    assert.equal(library[0]?.channelName, 'Creator Name');
    assert.equal(library[0]?.channelUrl, 'https://www.youtube.com/channel/UCcreator123');
    // Detail rows additionally expose url/thumbnail/uploader/description.
    assert.equal(detail?.youtubeVideoId, 'abc123');
    assert.equal(detail?.videoUrl, 'https://www.youtube.com/watch?v=abc123');
    assert.equal(detail?.videoThumbnailUrl, 'https://i.ytimg.com/vi/abc123/hqdefault.jpg');
    assert.equal(detail?.channelThumbnailUrl, 'https://yt3.googleusercontent.com/channel-avatar=s88');
    assert.equal(detail?.uploaderId, '@creator');
    assert.equal(detail?.uploaderUrl, 'https://www.youtube.com/@creator');
    assert.equal(detail?.description, 'Video description');
  } finally {
    db.close();
    cleanupDbPath(dbPath);
  }
});
test('cover art queries reuse a shared blob across duplicate anime art rows', () => { test('cover art queries reuse a shared blob across duplicate anime art rows', () => {
const dbPath = makeDbPath(); const dbPath = makeDbPath();
const db = new Database(dbPath); const db = new Database(dbPath);

View File

@@ -1817,6 +1817,17 @@ export function getMediaLibrary(db: DatabaseSync): MediaLibraryRow[] {
COALESCE(lm.total_cards, 0) AS totalCards, COALESCE(lm.total_cards, 0) AS totalCards,
COALESCE(lm.total_tokens_seen, 0) AS totalTokensSeen, COALESCE(lm.total_tokens_seen, 0) AS totalTokensSeen,
COALESCE(lm.last_watched_ms, 0) AS lastWatchedMs, COALESCE(lm.last_watched_ms, 0) AS lastWatchedMs,
yv.youtube_video_id AS youtubeVideoId,
yv.video_url AS videoUrl,
yv.video_title AS videoTitle,
yv.video_thumbnail_url AS videoThumbnailUrl,
yv.channel_id AS channelId,
yv.channel_name AS channelName,
yv.channel_url AS channelUrl,
yv.channel_thumbnail_url AS channelThumbnailUrl,
yv.uploader_id AS uploaderId,
yv.uploader_url AS uploaderUrl,
yv.description AS description,
CASE CASE
WHEN ma.cover_blob_hash IS NOT NULL OR ma.cover_blob IS NOT NULL THEN 1 WHEN ma.cover_blob_hash IS NOT NULL OR ma.cover_blob IS NOT NULL THEN 1
ELSE 0 ELSE 0
@@ -1824,6 +1835,7 @@ export function getMediaLibrary(db: DatabaseSync): MediaLibraryRow[] {
FROM imm_videos v FROM imm_videos v
JOIN imm_lifetime_media lm ON lm.video_id = v.video_id JOIN imm_lifetime_media lm ON lm.video_id = v.video_id
LEFT JOIN imm_media_art ma ON ma.video_id = v.video_id LEFT JOIN imm_media_art ma ON ma.video_id = v.video_id
LEFT JOIN imm_youtube_videos yv ON yv.video_id = v.video_id
ORDER BY lm.last_watched_ms DESC ORDER BY lm.last_watched_ms DESC
`, `,
) )
@@ -1846,9 +1858,21 @@ export function getMediaDetail(db: DatabaseSync, videoId: number): MediaDetailRo
COALESCE(lm.total_lines_seen, 0) AS totalLinesSeen, COALESCE(lm.total_lines_seen, 0) AS totalLinesSeen,
COALESCE(SUM(COALESCE(asm.lookupCount, s.lookup_count, 0)), 0) AS totalLookupCount, COALESCE(SUM(COALESCE(asm.lookupCount, s.lookup_count, 0)), 0) AS totalLookupCount,
COALESCE(SUM(COALESCE(asm.lookupHits, s.lookup_hits, 0)), 0) AS totalLookupHits, COALESCE(SUM(COALESCE(asm.lookupHits, s.lookup_hits, 0)), 0) AS totalLookupHits,
COALESCE(SUM(COALESCE(asm.yomitanLookupCount, s.yomitan_lookup_count, 0)), 0) AS totalYomitanLookupCount COALESCE(SUM(COALESCE(asm.yomitanLookupCount, s.yomitan_lookup_count, 0)), 0) AS totalYomitanLookupCount,
yv.youtube_video_id AS youtubeVideoId,
yv.video_url AS videoUrl,
yv.video_title AS videoTitle,
yv.video_thumbnail_url AS videoThumbnailUrl,
yv.channel_id AS channelId,
yv.channel_name AS channelName,
yv.channel_url AS channelUrl,
yv.channel_thumbnail_url AS channelThumbnailUrl,
yv.uploader_id AS uploaderId,
yv.uploader_url AS uploaderUrl,
yv.description AS description
FROM imm_videos v FROM imm_videos v
JOIN imm_lifetime_media lm ON lm.video_id = v.video_id JOIN imm_lifetime_media lm ON lm.video_id = v.video_id
LEFT JOIN imm_youtube_videos yv ON yv.video_id = v.video_id
LEFT JOIN imm_sessions s ON s.video_id = v.video_id LEFT JOIN imm_sessions s ON s.video_id = v.video_id
LEFT JOIN active_session_metrics asm ON asm.sessionId = s.session_id LEFT JOIN active_session_metrics asm ON asm.sessionId = s.session_id
WHERE v.video_id = ? WHERE v.video_id = ?

View File

@@ -106,6 +106,7 @@ test('ensureSchema creates immersion core tables', () => {
assert.ok(tableNames.has('imm_kanji_line_occurrences')); assert.ok(tableNames.has('imm_kanji_line_occurrences'));
assert.ok(tableNames.has('imm_rollup_state')); assert.ok(tableNames.has('imm_rollup_state'));
assert.ok(tableNames.has('imm_cover_art_blobs')); assert.ok(tableNames.has('imm_cover_art_blobs'));
assert.ok(tableNames.has('imm_youtube_videos'));
const videoColumns = new Set( const videoColumns = new Set(
( (
@@ -146,6 +147,114 @@ test('ensureSchema creates immersion core tables', () => {
} }
}); });
test('ensureSchema adds youtube metadata table to existing schema version 15 databases', () => {
const dbPath = makeDbPath();
const db = new Database(dbPath);
try {
db.exec(`
CREATE TABLE imm_schema_version (
schema_version INTEGER PRIMARY KEY,
applied_at_ms INTEGER NOT NULL
);
INSERT INTO imm_schema_version(schema_version, applied_at_ms) VALUES (15, 1000);
CREATE TABLE imm_rollup_state(
state_key TEXT PRIMARY KEY,
state_value INTEGER NOT NULL
);
INSERT INTO imm_rollup_state(state_key, state_value) VALUES ('last_rollup_sample_ms', 123);
CREATE TABLE imm_anime(
anime_id INTEGER PRIMARY KEY AUTOINCREMENT,
normalized_title_key TEXT NOT NULL UNIQUE,
canonical_title TEXT NOT NULL,
anilist_id INTEGER UNIQUE,
title_romaji TEXT,
title_english TEXT,
title_native TEXT,
episodes_total INTEGER,
description TEXT,
metadata_json TEXT,
CREATED_DATE INTEGER,
LAST_UPDATE_DATE INTEGER
);
CREATE TABLE imm_videos(
video_id INTEGER PRIMARY KEY AUTOINCREMENT,
video_key TEXT NOT NULL UNIQUE,
anime_id INTEGER,
canonical_title TEXT NOT NULL,
source_type INTEGER NOT NULL,
source_path TEXT,
source_url TEXT,
parsed_basename TEXT,
parsed_title TEXT,
parsed_season INTEGER,
parsed_episode INTEGER,
parser_source TEXT,
parser_confidence REAL,
parse_metadata_json TEXT,
watched INTEGER NOT NULL DEFAULT 0,
duration_ms INTEGER NOT NULL CHECK(duration_ms>=0),
file_size_bytes INTEGER CHECK(file_size_bytes>=0),
codec_id INTEGER, container_id INTEGER,
width_px INTEGER, height_px INTEGER, fps_x100 INTEGER,
bitrate_kbps INTEGER, audio_codec_id INTEGER,
hash_sha256 TEXT, screenshot_path TEXT,
metadata_json TEXT,
CREATED_DATE INTEGER,
LAST_UPDATE_DATE INTEGER,
FOREIGN KEY(anime_id) REFERENCES imm_anime(anime_id) ON DELETE SET NULL
);
`);
ensureSchema(db);
const tables = new Set(
(
db.prepare(`SELECT name FROM sqlite_master WHERE type = 'table' AND name LIKE 'imm_%'`).all() as Array<{
name: string;
}>
).map((row) => row.name),
);
assert.ok(tables.has('imm_youtube_videos'));
const columns = new Set(
(
db.prepare('PRAGMA table_info(imm_youtube_videos)').all() as Array<{
name: string;
}>
).map((row) => row.name),
);
assert.deepEqual(
columns,
new Set([
'video_id',
'youtube_video_id',
'video_url',
'video_title',
'video_thumbnail_url',
'channel_id',
'channel_name',
'channel_url',
'channel_thumbnail_url',
'uploader_id',
'uploader_url',
'description',
'metadata_json',
'fetched_at_ms',
'CREATED_DATE',
'LAST_UPDATE_DATE',
]),
);
} finally {
db.close();
cleanupDbPath(dbPath);
}
});
test('ensureSchema creates large-history performance indexes', () => { test('ensureSchema creates large-history performance indexes', () => {
const dbPath = makeDbPath(); const dbPath = makeDbPath();
const db = new Database(dbPath); const db = new Database(dbPath);
@@ -169,6 +278,8 @@ test('ensureSchema creates large-history performance indexes', () => {
assert.ok(indexNames.has('idx_kanji_frequency')); assert.ok(indexNames.has('idx_kanji_frequency'));
assert.ok(indexNames.has('idx_media_art_anilist_id')); assert.ok(indexNames.has('idx_media_art_anilist_id'));
assert.ok(indexNames.has('idx_media_art_cover_url')); assert.ok(indexNames.has('idx_media_art_cover_url'));
assert.ok(indexNames.has('idx_youtube_videos_channel_id'));
assert.ok(indexNames.has('idx_youtube_videos_youtube_video_id'));
} finally { } finally {
db.close(); db.close();
cleanupDbPath(dbPath); cleanupDbPath(dbPath);

View File

@@ -2,7 +2,7 @@ import { createHash } from 'node:crypto';
import { parseMediaInfo } from '../../../jimaku/utils'; import { parseMediaInfo } from '../../../jimaku/utils';
import type { DatabaseSync } from './sqlite'; import type { DatabaseSync } from './sqlite';
import { SCHEMA_VERSION } from './types'; import { SCHEMA_VERSION } from './types';
import type { QueuedWrite, VideoMetadata } from './types'; import type { QueuedWrite, VideoMetadata, YoutubeVideoMetadata } from './types';
export interface TrackerPreparedStatements { export interface TrackerPreparedStatements {
telemetryInsertStmt: ReturnType<DatabaseSync['prepare']>; telemetryInsertStmt: ReturnType<DatabaseSync['prepare']>;
@@ -743,6 +743,27 @@ export function ensureSchema(db: DatabaseSync): void {
FOREIGN KEY(video_id) REFERENCES imm_videos(video_id) ON DELETE CASCADE FOREIGN KEY(video_id) REFERENCES imm_videos(video_id) ON DELETE CASCADE
); );
`); `);
// Per-video YouTube metadata: 1:1 with imm_videos via the shared video_id
// primary key, and removed automatically (ON DELETE CASCADE) when the owning
// video row is deleted. All descriptive fields are nullable; only the external
// id, the canonical URL, and the fetch timestamp are required.
db.exec(`
CREATE TABLE IF NOT EXISTS imm_youtube_videos(
video_id INTEGER PRIMARY KEY,
youtube_video_id TEXT NOT NULL,
video_url TEXT NOT NULL,
video_title TEXT,
video_thumbnail_url TEXT,
channel_id TEXT,
channel_name TEXT,
channel_url TEXT,
channel_thumbnail_url TEXT,
uploader_id TEXT,
uploader_url TEXT,
description TEXT,
metadata_json TEXT,
fetched_at_ms INTEGER NOT NULL,
CREATED_DATE INTEGER,
LAST_UPDATE_DATE INTEGER,
FOREIGN KEY(video_id) REFERENCES imm_videos(video_id) ON DELETE CASCADE
);
`);
db.exec(` db.exec(`
CREATE TABLE IF NOT EXISTS imm_cover_art_blobs( CREATE TABLE IF NOT EXISTS imm_cover_art_blobs(
blob_hash TEXT PRIMARY KEY, blob_hash TEXT PRIMARY KEY,
@@ -1134,6 +1155,14 @@ export function ensureSchema(db: DatabaseSync): void {
CREATE INDEX IF NOT EXISTS idx_media_art_cover_url CREATE INDEX IF NOT EXISTS idx_media_art_cover_url
ON imm_media_art(cover_url) ON imm_media_art(cover_url)
`); `);
// Index for channel-scoped queries over YouTube metadata rows.
db.exec(`
CREATE INDEX IF NOT EXISTS idx_youtube_videos_channel_id
ON imm_youtube_videos(channel_id)
`);
// Index for lookups by the external YouTube video id.
db.exec(`
CREATE INDEX IF NOT EXISTS idx_youtube_videos_youtube_video_id
ON imm_youtube_videos(youtube_video_id)
`);
if (currentVersion?.schema_version && currentVersion.schema_version < SCHEMA_VERSION) { if (currentVersion?.schema_version && currentVersion.schema_version < SCHEMA_VERSION) {
db.exec('DELETE FROM imm_daily_rollups'); db.exec('DELETE FROM imm_daily_rollups');
@@ -1506,3 +1535,65 @@ export function updateVideoTitleRecord(
`, `,
).run(canonicalTitle, Date.now(), videoId); ).run(canonicalTitle, Date.now(), videoId);
} }
/**
 * Inserts or refreshes the YouTube metadata row attached to a tracked video.
 *
 * `imm_youtube_videos` is keyed 1:1 by `video_id`, so a repeat call for the
 * same video rewrites every metadata column in place. `CREATED_DATE` is
 * deliberately absent from the `ON CONFLICT` update list, so it keeps the
 * timestamp of the first insert while `fetched_at_ms` and `LAST_UPDATE_DATE`
 * move forward on each refresh.
 *
 * @param db - Open immersion-tracker database handle.
 * @param videoId - Row id in `imm_videos` that owns this metadata.
 * @param metadata - Fetched YouTube metadata; optional fields become NULL.
 */
export function upsertYoutubeVideoMetadata(
  db: DatabaseSync,
  videoId: number,
  metadata: YoutubeVideoMetadata,
): void {
  const now = Date.now();
  const upsertStmt = db.prepare(
    `
    INSERT INTO imm_youtube_videos (
      video_id,
      youtube_video_id,
      video_url,
      video_title,
      video_thumbnail_url,
      channel_id,
      channel_name,
      channel_url,
      channel_thumbnail_url,
      uploader_id,
      uploader_url,
      description,
      metadata_json,
      fetched_at_ms,
      CREATED_DATE,
      LAST_UPDATE_DATE
    ) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
    ON CONFLICT(video_id) DO UPDATE SET
      youtube_video_id = excluded.youtube_video_id,
      video_url = excluded.video_url,
      video_title = excluded.video_title,
      video_thumbnail_url = excluded.video_thumbnail_url,
      channel_id = excluded.channel_id,
      channel_name = excluded.channel_name,
      channel_url = excluded.channel_url,
      channel_thumbnail_url = excluded.channel_thumbnail_url,
      uploader_id = excluded.uploader_id,
      uploader_url = excluded.uploader_url,
      description = excluded.description,
      metadata_json = excluded.metadata_json,
      fetched_at_ms = excluded.fetched_at_ms,
      LAST_UPDATE_DATE = excluded.LAST_UPDATE_DATE
    `,
  );
  // Optional fields are normalized to NULL so the driver never sees undefined.
  const params = [
    videoId,
    metadata.youtubeVideoId,
    metadata.videoUrl,
    metadata.videoTitle ?? null,
    metadata.videoThumbnailUrl ?? null,
    metadata.channelId ?? null,
    metadata.channelName ?? null,
    metadata.channelUrl ?? null,
    metadata.channelThumbnailUrl ?? null,
    metadata.uploaderId ?? null,
    metadata.uploaderUrl ?? null,
    metadata.description ?? null,
    metadata.metadataJson ?? null,
    now,
    now,
    now,
  ];
  upsertStmt.run(...params);
}

View File

@@ -1,4 +1,4 @@
export const SCHEMA_VERSION = 15; export const SCHEMA_VERSION = 16;
export const DEFAULT_QUEUE_CAP = 1_000; export const DEFAULT_QUEUE_CAP = 1_000;
export const DEFAULT_BATCH_SIZE = 25; export const DEFAULT_BATCH_SIZE = 25;
export const DEFAULT_FLUSH_INTERVAL_MS = 500; export const DEFAULT_FLUSH_INTERVAL_MS = 500;
@@ -420,6 +420,17 @@ export interface MediaLibraryRow {
totalTokensSeen: number; totalTokensSeen: number;
lastWatchedMs: number; lastWatchedMs: number;
hasCoverArt: number; hasCoverArt: number;
youtubeVideoId: string | null;
videoUrl: string | null;
videoTitle: string | null;
videoThumbnailUrl: string | null;
channelId: string | null;
channelName: string | null;
channelUrl: string | null;
channelThumbnailUrl: string | null;
uploaderId: string | null;
uploaderUrl: string | null;
description: string | null;
} }
export interface MediaDetailRow { export interface MediaDetailRow {
@@ -434,6 +445,32 @@ export interface MediaDetailRow {
totalLookupCount: number; totalLookupCount: number;
totalLookupHits: number; totalLookupHits: number;
totalYomitanLookupCount: number; totalYomitanLookupCount: number;
youtubeVideoId: string | null;
videoUrl: string | null;
videoTitle: string | null;
videoThumbnailUrl: string | null;
channelId: string | null;
channelName: string | null;
channelUrl: string | null;
channelThumbnailUrl: string | null;
uploaderId: string | null;
uploaderUrl: string | null;
description: string | null;
}
export interface YoutubeVideoMetadata {
youtubeVideoId: string;
videoUrl: string;
videoTitle: string | null;
videoThumbnailUrl: string | null;
channelId: string | null;
channelName: string | null;
channelUrl: string | null;
channelThumbnailUrl: string | null;
uploaderId: string | null;
uploaderUrl: string | null;
description: string | null;
metadataJson: string | null;
} }
export interface AnimeLibraryRow { export interface AnimeLibraryRow {

View File

@@ -79,7 +79,10 @@ export {
handleOverlayWindowBeforeInputEvent, handleOverlayWindowBeforeInputEvent,
isTabInputForMpvForwarding, isTabInputForMpvForwarding,
} from './overlay-window-input'; } from './overlay-window-input';
export { initializeOverlayRuntime } from './overlay-runtime-init'; export {
initializeOverlayAnkiIntegration,
initializeOverlayRuntime,
} from './overlay-runtime-init';
export { setVisibleOverlayVisible, updateVisibleOverlayVisibility } from './overlay-visibility'; export { setVisibleOverlayVisible, updateVisibleOverlayVisibility } from './overlay-visibility';
export { export {
MPV_REQUEST_ID_SECONDARY_SUB_VISIBILITY, MPV_REQUEST_ID_SECONDARY_SUB_VISIBILITY,

View File

@@ -3,6 +3,7 @@ import assert from 'node:assert/strict';
import { createIpcDepsRuntime, registerIpcHandlers, type IpcServiceDeps } from './ipc'; import { createIpcDepsRuntime, registerIpcHandlers, type IpcServiceDeps } from './ipc';
import { IPC_CHANNELS } from '../../shared/ipc/contracts'; import { IPC_CHANNELS } from '../../shared/ipc/contracts';
import type { SubtitleSidebarSnapshot } from '../../types';
interface FakeIpcRegistrar { interface FakeIpcRegistrar {
on: Map<string, (event: unknown, ...args: unknown[]) => void>; on: Map<string, (event: unknown, ...args: unknown[]) => void>;
@@ -77,6 +78,31 @@ function createControllerConfigFixture() {
}; };
} }
/**
 * Builds a minimal SubtitleSidebarSnapshot for IPC tests: no cues, an empty
 * current subtitle, and a fully populated (but disabled) sidebar config.
 */
function createSubtitleSidebarSnapshotFixture(): SubtitleSidebarSnapshot {
  const config: SubtitleSidebarSnapshot['config'] = {
    enabled: false,
    autoOpen: false,
    layout: 'overlay',
    toggleKey: 'Backslash',
    pauseVideoOnHover: false,
    autoScroll: true,
    maxWidth: 420,
    opacity: 0.92,
    backgroundColor: 'rgba(54, 58, 79, 0.88)',
    textColor: '#cad3f5',
    fontFamily: '"M PLUS 1", "Noto Sans CJK JP", sans-serif',
    fontSize: 16,
    timestampColor: '#a5adcb',
    activeLineColor: '#f5bde6',
    activeLineBackgroundColor: 'rgba(138, 173, 244, 0.22)',
    hoverLineBackgroundColor: 'rgba(54, 58, 79, 0.84)',
  };
  const currentSubtitle: SubtitleSidebarSnapshot['currentSubtitle'] = {
    text: '',
    startTime: null,
    endTime: null,
  };
  return { cues: [], currentSubtitle, config };
}
function createRegisterIpcDeps(overrides: Partial<IpcServiceDeps> = {}): IpcServiceDeps { function createRegisterIpcDeps(overrides: Partial<IpcServiceDeps> = {}): IpcServiceDeps {
return { return {
onOverlayModalClosed: () => {}, onOverlayModalClosed: () => {},
@@ -88,6 +114,7 @@ function createRegisterIpcDeps(overrides: Partial<IpcServiceDeps> = {}): IpcServ
tokenizeCurrentSubtitle: async () => null, tokenizeCurrentSubtitle: async () => null,
getCurrentSubtitleRaw: () => '', getCurrentSubtitleRaw: () => '',
getCurrentSubtitleAss: () => '', getCurrentSubtitleAss: () => '',
getSubtitleSidebarSnapshot: async () => createSubtitleSidebarSnapshotFixture(),
getPlaybackPaused: () => false, getPlaybackPaused: () => false,
getSubtitlePosition: () => null, getSubtitlePosition: () => null,
getSubtitleStyle: () => null, getSubtitleStyle: () => null,
@@ -117,6 +144,7 @@ function createRegisterIpcDeps(overrides: Partial<IpcServiceDeps> = {}): IpcServ
getAnilistQueueStatus: () => ({}), getAnilistQueueStatus: () => ({}),
retryAnilistQueueNow: async () => ({ ok: true, message: 'ok' }), retryAnilistQueueNow: async () => ({ ok: true, message: 'ok' }),
appendClipboardVideoToQueue: () => ({ ok: true, message: 'ok' }), appendClipboardVideoToQueue: () => ({ ok: true, message: 'ok' }),
onYoutubePickerResolve: async () => ({ ok: true, message: 'ok' }),
immersionTracker: null, immersionTracker: null,
...overrides, ...overrides,
}; };
@@ -173,6 +201,7 @@ test('createIpcDepsRuntime wires AniList handlers', async () => {
tokenizeCurrentSubtitle: async () => null, tokenizeCurrentSubtitle: async () => null,
getCurrentSubtitleRaw: () => '', getCurrentSubtitleRaw: () => '',
getCurrentSubtitleAss: () => '', getCurrentSubtitleAss: () => '',
getSubtitleSidebarSnapshot: async () => createSubtitleSidebarSnapshotFixture(),
getPlaybackPaused: () => true, getPlaybackPaused: () => true,
getSubtitlePosition: () => null, getSubtitlePosition: () => null,
getSubtitleStyle: () => null, getSubtitleStyle: () => null,
@@ -208,6 +237,7 @@ test('createIpcDepsRuntime wires AniList handlers', async () => {
return { ok: true, message: 'done' }; return { ok: true, message: 'done' };
}, },
appendClipboardVideoToQueue: () => ({ ok: true, message: 'queued' }), appendClipboardVideoToQueue: () => ({ ok: true, message: 'queued' }),
onYoutubePickerResolve: async () => ({ ok: true, message: 'ok' }),
}); });
assert.deepEqual(deps.getAnilistStatus(), { tokenStatus: 'resolved' }); assert.deepEqual(deps.getAnilistStatus(), { tokenStatus: 'resolved' });
@@ -269,6 +299,7 @@ test('registerIpcHandlers rejects malformed runtime-option payloads', async () =
cycles.push({ id, direction }); cycles.push({ id, direction });
return { ok: true }; return { ok: true };
}, },
getSubtitleSidebarSnapshot: async () => createSubtitleSidebarSnapshotFixture(),
reportOverlayContentBounds: () => {}, reportOverlayContentBounds: () => {},
getAnilistStatus: () => ({}), getAnilistStatus: () => ({}),
clearAnilistToken: () => {}, clearAnilistToken: () => {},
@@ -276,6 +307,7 @@ test('registerIpcHandlers rejects malformed runtime-option payloads', async () =
getAnilistQueueStatus: () => ({}), getAnilistQueueStatus: () => ({}),
retryAnilistQueueNow: async () => ({ ok: true, message: 'ok' }), retryAnilistQueueNow: async () => ({ ok: true, message: 'ok' }),
appendClipboardVideoToQueue: () => ({ ok: true, message: 'ok' }), appendClipboardVideoToQueue: () => ({ ok: true, message: 'ok' }),
onYoutubePickerResolve: async () => ({ ok: true, message: 'ok' }),
}, },
registrar, registrar,
); );
@@ -320,6 +352,24 @@ test('registerIpcHandlers rejects malformed runtime-option payloads', async () =
); );
}); });
// The sidebar-snapshot request channel must resolve with exactly what the
// wired getSubtitleSidebarSnapshot dependency produces.
test('registerIpcHandlers exposes subtitle sidebar snapshot request', async () => {
  const { registrar, handlers } = createFakeIpcRegistrar();
  const fixture = createSubtitleSidebarSnapshotFixture();
  fixture.config.enabled = true;
  fixture.cues = [{ startTime: 1, endTime: 2, text: 'line-1' }];
  registerIpcHandlers(
    createRegisterIpcDeps({ getSubtitleSidebarSnapshot: async () => fixture }),
    registrar,
  );
  const snapshotHandler = handlers.handle.get(IPC_CHANNELS.request.getSubtitleSidebarSnapshot);
  assert.ok(snapshotHandler);
  assert.deepEqual(await snapshotHandler!({}), fixture);
});
test('registerIpcHandlers forwards yomitan lookup tracking commands to immersion tracker', () => { test('registerIpcHandlers forwards yomitan lookup tracking commands to immersion tracker', () => {
const { registrar, handlers } = createFakeIpcRegistrar(); const { registrar, handlers } = createFakeIpcRegistrar();
const calls: string[] = []; const calls: string[] = [];
@@ -530,6 +580,7 @@ test('registerIpcHandlers ignores malformed fire-and-forget payloads', () => {
tokenizeCurrentSubtitle: async () => null, tokenizeCurrentSubtitle: async () => null,
getCurrentSubtitleRaw: () => '', getCurrentSubtitleRaw: () => '',
getCurrentSubtitleAss: () => '', getCurrentSubtitleAss: () => '',
getSubtitleSidebarSnapshot: async () => createSubtitleSidebarSnapshotFixture(),
getPlaybackPaused: () => false, getPlaybackPaused: () => false,
getSubtitlePosition: () => null, getSubtitlePosition: () => null,
getSubtitleStyle: () => null, getSubtitleStyle: () => null,
@@ -563,6 +614,7 @@ test('registerIpcHandlers ignores malformed fire-and-forget payloads', () => {
getAnilistQueueStatus: () => ({}), getAnilistQueueStatus: () => ({}),
retryAnilistQueueNow: async () => ({ ok: true, message: 'ok' }), retryAnilistQueueNow: async () => ({ ok: true, message: 'ok' }),
appendClipboardVideoToQueue: () => ({ ok: true, message: 'ok' }), appendClipboardVideoToQueue: () => ({ ok: true, message: 'ok' }),
onYoutubePickerResolve: async () => ({ ok: true, message: 'ok' }),
}, },
registrar, registrar,
); );
@@ -596,6 +648,7 @@ test('registerIpcHandlers awaits saveControllerPreference through request-respon
tokenizeCurrentSubtitle: async () => null, tokenizeCurrentSubtitle: async () => null,
getCurrentSubtitleRaw: () => '', getCurrentSubtitleRaw: () => '',
getCurrentSubtitleAss: () => '', getCurrentSubtitleAss: () => '',
getSubtitleSidebarSnapshot: async () => createSubtitleSidebarSnapshotFixture(),
getPlaybackPaused: () => false, getPlaybackPaused: () => false,
getSubtitlePosition: () => null, getSubtitlePosition: () => null,
getSubtitleStyle: () => null, getSubtitleStyle: () => null,
@@ -628,6 +681,7 @@ test('registerIpcHandlers awaits saveControllerPreference through request-respon
getAnilistQueueStatus: () => ({}), getAnilistQueueStatus: () => ({}),
retryAnilistQueueNow: async () => ({ ok: true, message: 'ok' }), retryAnilistQueueNow: async () => ({ ok: true, message: 'ok' }),
appendClipboardVideoToQueue: () => ({ ok: true, message: 'ok' }), appendClipboardVideoToQueue: () => ({ ok: true, message: 'ok' }),
onYoutubePickerResolve: async () => ({ ok: true, message: 'ok' }),
}, },
registrar, registrar,
); );
@@ -667,6 +721,7 @@ test('registerIpcHandlers rejects malformed controller preference payloads', asy
tokenizeCurrentSubtitle: async () => null, tokenizeCurrentSubtitle: async () => null,
getCurrentSubtitleRaw: () => '', getCurrentSubtitleRaw: () => '',
getCurrentSubtitleAss: () => '', getCurrentSubtitleAss: () => '',
getSubtitleSidebarSnapshot: async () => createSubtitleSidebarSnapshotFixture(),
getPlaybackPaused: () => false, getPlaybackPaused: () => false,
getSubtitlePosition: () => null, getSubtitlePosition: () => null,
getSubtitleStyle: () => null, getSubtitleStyle: () => null,
@@ -696,6 +751,7 @@ test('registerIpcHandlers rejects malformed controller preference payloads', asy
getAnilistQueueStatus: () => ({}), getAnilistQueueStatus: () => ({}),
retryAnilistQueueNow: async () => ({ ok: true, message: 'ok' }), retryAnilistQueueNow: async () => ({ ok: true, message: 'ok' }),
appendClipboardVideoToQueue: () => ({ ok: true, message: 'ok' }), appendClipboardVideoToQueue: () => ({ ok: true, message: 'ok' }),
onYoutubePickerResolve: async () => ({ ok: true, message: 'ok' }),
}, },
registrar, registrar,
); );

View File

@@ -6,9 +6,12 @@ import type {
ResolvedControllerConfig, ResolvedControllerConfig,
RuntimeOptionId, RuntimeOptionId,
RuntimeOptionValue, RuntimeOptionValue,
SubtitleSidebarSnapshot,
SubtitlePosition, SubtitlePosition,
SubsyncManualRunRequest, SubsyncManualRunRequest,
SubsyncResult, SubsyncResult,
YoutubePickerResolveRequest,
YoutubePickerResolveResult,
} from '../../types'; } from '../../types';
import { IPC_CHANNELS, type OverlayHostedModal } from '../../shared/ipc/contracts'; import { IPC_CHANNELS, type OverlayHostedModal } from '../../shared/ipc/contracts';
import { import {
@@ -22,6 +25,7 @@ import {
parseRuntimeOptionValue, parseRuntimeOptionValue,
parseSubtitlePosition, parseSubtitlePosition,
parseSubsyncManualRunRequest, parseSubsyncManualRunRequest,
parseYoutubePickerResolveRequest,
} from '../../shared/ipc/validators'; } from '../../shared/ipc/validators';
const { BrowserWindow, ipcMain } = electron; const { BrowserWindow, ipcMain } = electron;
@@ -37,6 +41,7 @@ export interface IpcServiceDeps {
tokenizeCurrentSubtitle: () => Promise<unknown>; tokenizeCurrentSubtitle: () => Promise<unknown>;
getCurrentSubtitleRaw: () => string; getCurrentSubtitleRaw: () => string;
getCurrentSubtitleAss: () => string; getCurrentSubtitleAss: () => string;
getSubtitleSidebarSnapshot?: () => Promise<SubtitleSidebarSnapshot>;
getPlaybackPaused: () => boolean | null; getPlaybackPaused: () => boolean | null;
getSubtitlePosition: () => unknown; getSubtitlePosition: () => unknown;
getSubtitleStyle: () => unknown; getSubtitleStyle: () => unknown;
@@ -59,6 +64,7 @@ export interface IpcServiceDeps {
getCurrentSecondarySub: () => string; getCurrentSecondarySub: () => string;
focusMainWindow: () => void; focusMainWindow: () => void;
runSubsyncManual: (request: SubsyncManualRunRequest) => Promise<SubsyncResult>; runSubsyncManual: (request: SubsyncManualRunRequest) => Promise<SubsyncResult>;
onYoutubePickerResolve: (request: YoutubePickerResolveRequest) => Promise<YoutubePickerResolveResult>;
getAnkiConnectStatus: () => boolean; getAnkiConnectStatus: () => boolean;
getRuntimeOptions: () => unknown; getRuntimeOptions: () => unknown;
setRuntimeOption: (id: RuntimeOptionId, value: RuntimeOptionValue) => unknown; setRuntimeOption: (id: RuntimeOptionId, value: RuntimeOptionValue) => unknown;
@@ -143,6 +149,7 @@ export interface IpcDepsRuntimeOptions {
tokenizeCurrentSubtitle: () => Promise<unknown>; tokenizeCurrentSubtitle: () => Promise<unknown>;
getCurrentSubtitleRaw: () => string; getCurrentSubtitleRaw: () => string;
getCurrentSubtitleAss: () => string; getCurrentSubtitleAss: () => string;
getSubtitleSidebarSnapshot?: () => Promise<SubtitleSidebarSnapshot>;
getPlaybackPaused: () => boolean | null; getPlaybackPaused: () => boolean | null;
getSubtitlePosition: () => unknown; getSubtitlePosition: () => unknown;
getSubtitleStyle: () => unknown; getSubtitleStyle: () => unknown;
@@ -160,6 +167,7 @@ export interface IpcDepsRuntimeOptions {
getMpvClient: () => MpvClientLike | null; getMpvClient: () => MpvClientLike | null;
focusMainWindow: () => void; focusMainWindow: () => void;
runSubsyncManual: (request: SubsyncManualRunRequest) => Promise<SubsyncResult>; runSubsyncManual: (request: SubsyncManualRunRequest) => Promise<SubsyncResult>;
onYoutubePickerResolve: (request: YoutubePickerResolveRequest) => Promise<YoutubePickerResolveResult>;
getAnkiConnectStatus: () => boolean; getAnkiConnectStatus: () => boolean;
getRuntimeOptions: () => unknown; getRuntimeOptions: () => unknown;
setRuntimeOption: (id: RuntimeOptionId, value: RuntimeOptionValue) => unknown; setRuntimeOption: (id: RuntimeOptionId, value: RuntimeOptionValue) => unknown;
@@ -190,6 +198,7 @@ export function createIpcDepsRuntime(options: IpcDepsRuntimeOptions): IpcService
tokenizeCurrentSubtitle: options.tokenizeCurrentSubtitle, tokenizeCurrentSubtitle: options.tokenizeCurrentSubtitle,
getCurrentSubtitleRaw: options.getCurrentSubtitleRaw, getCurrentSubtitleRaw: options.getCurrentSubtitleRaw,
getCurrentSubtitleAss: options.getCurrentSubtitleAss, getCurrentSubtitleAss: options.getCurrentSubtitleAss,
getSubtitleSidebarSnapshot: options.getSubtitleSidebarSnapshot,
getPlaybackPaused: options.getPlaybackPaused, getPlaybackPaused: options.getPlaybackPaused,
getSubtitlePosition: options.getSubtitlePosition, getSubtitlePosition: options.getSubtitlePosition,
getSubtitleStyle: options.getSubtitleStyle, getSubtitleStyle: options.getSubtitleStyle,
@@ -221,6 +230,7 @@ export function createIpcDepsRuntime(options: IpcDepsRuntimeOptions): IpcService
mainWindow.focus(); mainWindow.focus();
}, },
runSubsyncManual: options.runSubsyncManual, runSubsyncManual: options.runSubsyncManual,
onYoutubePickerResolve: options.onYoutubePickerResolve,
getAnkiConnectStatus: options.getAnkiConnectStatus, getAnkiConnectStatus: options.getAnkiConnectStatus,
getRuntimeOptions: options.getRuntimeOptions, getRuntimeOptions: options.getRuntimeOptions,
setRuntimeOption: options.setRuntimeOption, setRuntimeOption: options.setRuntimeOption,
@@ -281,6 +291,14 @@ export function registerIpcHandlers(deps: IpcServiceDeps, ipc: IpcMainRegistrar
deps.onOverlayModalOpened(parsedModal); deps.onOverlayModalOpened(parsedModal);
}); });
// Request/response: validate the renderer-supplied YouTube picker payload
// before delegating. Malformed payloads produce a structured failure result
// rather than a thrown error.
ipc.handle(IPC_CHANNELS.request.youtubePickerResolve, async (_event: unknown, request: unknown) => {
const parsedRequest = parseYoutubePickerResolveRequest(request);
if (!parsedRequest) {
return { ok: false, message: 'Invalid YouTube picker resolve payload' };
}
return await deps.onYoutubePickerResolve(parsedRequest);
});
ipc.on(IPC_CHANNELS.command.openYomitanSettings, () => { ipc.on(IPC_CHANNELS.command.openYomitanSettings, () => {
deps.openYomitanSettings(); deps.openYomitanSettings();
}); });
@@ -321,6 +339,13 @@ export function registerIpcHandlers(deps: IpcServiceDeps, ipc: IpcMainRegistrar
return deps.getCurrentSubtitleAss(); return deps.getCurrentSubtitleAss();
}); });
// Request/response: current subtitle sidebar snapshot. The dependency is
// optional on IpcServiceDeps, so reject explicitly when the host never wired
// it up instead of crashing on an undefined call.
ipc.handle(IPC_CHANNELS.request.getSubtitleSidebarSnapshot, async () => {
if (!deps.getSubtitleSidebarSnapshot) {
throw new Error('Subtitle sidebar snapshot is unavailable.');
}
return await deps.getSubtitleSidebarSnapshot();
});
ipc.handle(IPC_CHANNELS.request.getPlaybackPaused, () => { ipc.handle(IPC_CHANNELS.request.getPlaybackPaused, () => {
return deps.getPlaybackPaused(); return deps.getPlaybackPaused();
}); });

View File

@@ -1,6 +1,6 @@
import assert from 'node:assert/strict'; import assert from 'node:assert/strict';
import test from 'node:test'; import test from 'node:test';
import { initializeOverlayRuntime } from './overlay-runtime-init'; import { initializeOverlayAnkiIntegration, initializeOverlayRuntime } from './overlay-runtime-init';
test('initializeOverlayRuntime skips Anki integration when ankiConnect.enabled is false', () => { test('initializeOverlayRuntime skips Anki integration when ankiConnect.enabled is false', () => {
let createdIntegrations = 0; let createdIntegrations = 0;
@@ -109,6 +109,136 @@ test('initializeOverlayRuntime starts Anki integration when ankiConnect.enabled
assert.equal(setIntegrationCalls, 1); assert.equal(setIntegrationCalls, 1);
}); });
// Late-initialization path: the overlay runtime already exists and the Anki
// transport is wired in afterwards. The sibling tests below assert the
// helper's boolean return on the skip paths; capture and assert it here too
// (true = integration was actually created and started) for consistency.
test('initializeOverlayAnkiIntegration can initialize Anki transport after overlay runtime already exists', () => {
  let createdIntegrations = 0;
  let startedIntegrations = 0;
  let setIntegrationCalls = 0;
  const result = initializeOverlayAnkiIntegration({
    getResolvedConfig: () => ({
      ankiConnect: { enabled: true } as never,
    }),
    getSubtitleTimingTracker: () => ({}),
    getMpvClient: () => ({
      send: () => {},
    }),
    getRuntimeOptionsManager: () => ({
      getEffectiveAnkiConnectConfig: (config) => config as never,
    }),
    createAnkiIntegration: (args) => {
      createdIntegrations += 1;
      // The effective config (enabled) must be forwarded to the factory.
      assert.equal(args.config.enabled, true);
      return {
        start: () => {
          startedIntegrations += 1;
        },
      };
    },
    setAnkiIntegration: () => {
      setIntegrationCalls += 1;
    },
    showDesktopNotification: () => {},
    createFieldGroupingCallback: () => async () => ({
      keepNoteId: 11,
      deleteNoteId: 12,
      deleteDuplicate: false,
      cancelled: false,
    }),
    getKnownWordCacheStatePath: () => '/tmp/known-words-cache.json',
  });
  // NOTE(review): sibling tests show the helper returns false on skip paths;
  // it is expected to report true when it initializes — confirm against impl.
  assert.equal(result, true);
  assert.equal(createdIntegrations, 1);
  assert.equal(startedIntegrations, 1);
  assert.equal(setIntegrationCalls, 1);
});
// When getAnkiIntegration reports an existing integration, the helper must be
// a no-op: no factory call, no start, no setAnkiIntegration, and it returns
// false to signal that nothing was initialized.
test('initializeOverlayAnkiIntegration returns false when integration already exists', () => {
let createdIntegrations = 0;
let startedIntegrations = 0;
let setIntegrationCalls = 0;
const result = initializeOverlayAnkiIntegration({
getResolvedConfig: () => ({
ankiConnect: { enabled: true } as never,
}),
getSubtitleTimingTracker: () => ({}),
getMpvClient: () => ({
send: () => {},
}),
getRuntimeOptionsManager: () => ({
getEffectiveAnkiConnectConfig: (config) => config as never,
}),
// An integration is already present — this is what should short-circuit.
getAnkiIntegration: () => ({}),
createAnkiIntegration: () => {
createdIntegrations += 1;
return {
start: () => {
startedIntegrations += 1;
},
};
},
setAnkiIntegration: () => {
setIntegrationCalls += 1;
},
showDesktopNotification: () => {},
createFieldGroupingCallback: () => async () => ({
keepNoteId: 11,
deleteNoteId: 12,
deleteDuplicate: false,
cancelled: false,
}),
getKnownWordCacheStatePath: () => '/tmp/known-words-cache.json',
});
// Nothing should have been created or started on the short-circuit path.
assert.equal(result, false);
assert.equal(createdIntegrations, 0);
assert.equal(startedIntegrations, 0);
assert.equal(setIntegrationCalls, 0);
});
// When the resolved config has ankiConnect disabled, the helper must skip
// initialization entirely and report false.
test('initializeOverlayAnkiIntegration returns false when ankiConnect is disabled', () => {
let createdIntegrations = 0;
let startedIntegrations = 0;
let setIntegrationCalls = 0;
const result = initializeOverlayAnkiIntegration({
// Disabled config — this is what should short-circuit the helper.
getResolvedConfig: () => ({
ankiConnect: { enabled: false } as never,
}),
getSubtitleTimingTracker: () => ({}),
getMpvClient: () => ({
send: () => {},
}),
getRuntimeOptionsManager: () => ({
getEffectiveAnkiConnectConfig: (config) => config as never,
}),
createAnkiIntegration: () => {
createdIntegrations += 1;
return {
start: () => {
startedIntegrations += 1;
},
};
},
setAnkiIntegration: () => {
setIntegrationCalls += 1;
},
showDesktopNotification: () => {},
createFieldGroupingCallback: () => async () => ({
keepNoteId: 11,
deleteNoteId: 12,
deleteDuplicate: false,
cancelled: false,
}),
getKnownWordCacheStatePath: () => '/tmp/known-words-cache.json',
});
// Nothing should have been created or started with the feature disabled.
assert.equal(result, false);
assert.equal(createdIntegrations, 0);
assert.equal(startedIntegrations, 0);
assert.equal(setIntegrationCalls, 0);
});
test('initializeOverlayRuntime can skip starting Anki integration transport', () => { test('initializeOverlayRuntime can skip starting Anki integration transport', () => {
let createdIntegrations = 0; let createdIntegrations = 0;
let startedIntegrations = 0; let startedIntegrations = 0;

View File

@@ -47,6 +47,24 @@ function createDefaultAnkiIntegration(args: CreateAnkiIntegrationArgs): AnkiInte
} }
export function initializeOverlayRuntime(options: { export function initializeOverlayRuntime(options: {
getMpvSocketPath: () => string;
getResolvedConfig: () => { ankiConnect?: AnkiConnectConfig; ai?: AiConfig };
getSubtitleTimingTracker: () => unknown | null;
getMpvClient: () => {
send?: (payload: { command: string[] }) => void;
} | null;
getRuntimeOptionsManager: () => {
getEffectiveAnkiConnectConfig: (config?: AnkiConnectConfig) => AnkiConnectConfig;
} | null;
getAnkiIntegration?: () => unknown | null;
setAnkiIntegration: (integration: unknown | null) => void;
showDesktopNotification: (title: string, options: { body?: string; icon?: string }) => void;
createFieldGroupingCallback: () => (
data: KikuFieldGroupingRequestData,
) => Promise<KikuFieldGroupingChoice>;
getKnownWordCacheStatePath: () => string;
shouldStartAnkiIntegration?: () => boolean;
createAnkiIntegration?: (args: CreateAnkiIntegrationArgs) => AnkiIntegrationLike;
backendOverride: string | null; backendOverride: string | null;
createMainWindow: () => void; createMainWindow: () => void;
registerGlobalShortcuts: () => void; registerGlobalShortcuts: () => void;
@@ -60,23 +78,6 @@ export function initializeOverlayRuntime(options: {
override?: string | null, override?: string | null,
targetMpvSocketPath?: string | null, targetMpvSocketPath?: string | null,
) => BaseWindowTracker | null; ) => BaseWindowTracker | null;
getMpvSocketPath: () => string;
getResolvedConfig: () => { ankiConnect?: AnkiConnectConfig; ai?: AiConfig };
getSubtitleTimingTracker: () => unknown | null;
getMpvClient: () => {
send?: (payload: { command: string[] }) => void;
} | null;
getRuntimeOptionsManager: () => {
getEffectiveAnkiConnectConfig: (config?: AnkiConnectConfig) => AnkiConnectConfig;
} | null;
setAnkiIntegration: (integration: unknown | null) => void;
showDesktopNotification: (title: string, options: { body?: string; icon?: string }) => void;
createFieldGroupingCallback: () => (
data: KikuFieldGroupingRequestData,
) => Promise<KikuFieldGroupingChoice>;
getKnownWordCacheStatePath: () => string;
shouldStartAnkiIntegration?: () => boolean;
createAnkiIntegration?: (args: CreateAnkiIntegrationArgs) => AnkiIntegrationLike;
}): void { }): void {
options.createMainWindow(); options.createMainWindow();
options.registerGlobalShortcuts(); options.registerGlobalShortcuts();
@@ -112,17 +113,48 @@ export function initializeOverlayRuntime(options: {
windowTracker.start(); windowTracker.start();
} }
initializeOverlayAnkiIntegration(options);
options.updateVisibleOverlayVisibility();
}
export function initializeOverlayAnkiIntegration(options: {
getResolvedConfig: () => { ankiConnect?: AnkiConnectConfig; ai?: AiConfig };
getSubtitleTimingTracker: () => unknown | null;
getMpvClient: () => {
send?: (payload: { command: string[] }) => void;
} | null;
getRuntimeOptionsManager: () => {
getEffectiveAnkiConnectConfig: (config?: AnkiConnectConfig) => AnkiConnectConfig;
} | null;
getAnkiIntegration?: () => unknown | null;
setAnkiIntegration: (integration: unknown | null) => void;
showDesktopNotification: (title: string, options: { body?: string; icon?: string }) => void;
createFieldGroupingCallback: () => (
data: KikuFieldGroupingRequestData,
) => Promise<KikuFieldGroupingChoice>;
getKnownWordCacheStatePath: () => string;
shouldStartAnkiIntegration?: () => boolean;
createAnkiIntegration?: (args: CreateAnkiIntegrationArgs) => AnkiIntegrationLike;
}): boolean {
if (options.getAnkiIntegration?.()) {
return false;
}
const config = options.getResolvedConfig(); const config = options.getResolvedConfig();
const subtitleTimingTracker = options.getSubtitleTimingTracker(); const subtitleTimingTracker = options.getSubtitleTimingTracker();
const mpvClient = options.getMpvClient(); const mpvClient = options.getMpvClient();
const runtimeOptionsManager = options.getRuntimeOptionsManager(); const runtimeOptionsManager = options.getRuntimeOptionsManager();
if ( if (
config.ankiConnect?.enabled === true && config.ankiConnect?.enabled !== true ||
subtitleTimingTracker && !subtitleTimingTracker ||
mpvClient && !mpvClient ||
runtimeOptionsManager !runtimeOptionsManager
) { ) {
return false;
}
const effectiveAnkiConfig = runtimeOptionsManager.getEffectiveAnkiConnectConfig( const effectiveAnkiConfig = runtimeOptionsManager.getEffectiveAnkiConnectConfig(
config.ankiConnect, config.ankiConnect,
); );
@@ -140,7 +172,5 @@ export function initializeOverlayRuntime(options: {
integration.start(); integration.start();
} }
options.setAnkiIntegration(integration); options.setAnkiIntegration(integration);
} return true;
options.updateVisibleOverlayVisibility();
} }

View File

@@ -194,3 +194,167 @@ test('runAppReadyRuntime headless refresh bootstraps Anki runtime without UI sta
'run-headless-command', 'run-headless-command',
]); ]);
}); });
test('runAppReadyRuntime loads Yomitan before headless overlay fallback initialization', async () => {
  // Ordered log of which runtime hooks fired.
  const events: string[] = [];
  // Stub factories: record() logs a label when invoked; noop variants ignore calls.
  const record = (label: string) => () => {
    events.push(label);
  };
  const recordAsync = (label: string) => async () => {
    events.push(label);
  };
  const noop = () => {};
  const asyncNoop = async () => {};
  await runAppReadyRuntime({
    ensureDefaultConfigBootstrap: record('bootstrap'),
    loadSubtitlePosition: record('load-subtitle-position'),
    resolveKeybindings: record('resolve-keybindings'),
    createMpvClient: record('create-mpv'),
    reloadConfig: record('reload-config'),
    getResolvedConfig: () => ({}),
    getConfigWarnings: () => [],
    logConfigWarning: noop,
    setLogLevel: noop,
    initRuntimeOptionsManager: record('init-runtime-options'),
    setSecondarySubMode: noop,
    defaultSecondarySubMode: 'hover',
    defaultWebsocketPort: 0,
    defaultAnnotationWebsocketPort: 0,
    defaultTexthookerPort: 0,
    hasMpvWebsocketPlugin: () => false,
    startSubtitleWebsocket: noop,
    startAnnotationWebsocket: noop,
    startTexthooker: noop,
    log: noop,
    createMecabTokenizerAndCheck: asyncNoop,
    createSubtitleTimingTracker: record('subtitle-timing'),
    createImmersionTracker: noop,
    startJellyfinRemoteSession: asyncNoop,
    loadYomitanExtension: recordAsync('load-yomitan'),
    handleFirstRunSetup: asyncNoop,
    prewarmSubtitleDictionaries: asyncNoop,
    startBackgroundWarmups: noop,
    texthookerOnlyMode: false,
    shouldAutoInitializeOverlayRuntimeFromConfig: () => false,
    setVisibleOverlayVisible: noop,
    initializeOverlayRuntime: record('init-overlay'),
    handleInitialArgs: record('handle-initial-args'),
    shouldRunHeadlessInitialCommand: () => true,
    shouldUseMinimalStartup: () => false,
    shouldSkipHeavyStartup: () => false,
  });
  // The headless path must load Yomitan after tracker setup and before the
  // overlay runtime initializes.
  assert.deepEqual(events, [
    'bootstrap',
    'reload-config',
    'init-runtime-options',
    'create-mpv',
    'subtitle-timing',
    'load-yomitan',
    'init-overlay',
    'handle-initial-args',
  ]);
});
test('runAppReadyRuntime loads Yomitan before auto-initializing overlay runtime', async () => {
  // Ordered log of which runtime hooks fired.
  const events: string[] = [];
  // Stub factories: record() logs a label when invoked; noop variants ignore calls.
  const record = (label: string) => () => {
    events.push(label);
  };
  const recordAsync = (label: string) => async () => {
    events.push(label);
  };
  const asyncNoop = async () => {};
  await runAppReadyRuntime({
    ensureDefaultConfigBootstrap: record('bootstrap'),
    loadSubtitlePosition: record('load-subtitle-position'),
    resolveKeybindings: record('resolve-keybindings'),
    createMpvClient: record('create-mpv'),
    reloadConfig: record('reload-config'),
    getResolvedConfig: () => ({
      websocket: { enabled: false },
      annotationWebsocket: { enabled: false },
      texthooker: { launchAtStartup: false },
    }),
    getConfigWarnings: () => [],
    logConfigWarning: () => {},
    setLogLevel: record('set-log-level'),
    initRuntimeOptionsManager: record('init-runtime-options'),
    setSecondarySubMode: record('set-secondary-sub-mode'),
    defaultSecondarySubMode: 'hover',
    defaultWebsocketPort: 0,
    defaultAnnotationWebsocketPort: 0,
    defaultTexthookerPort: 0,
    hasMpvWebsocketPlugin: () => false,
    startSubtitleWebsocket: record('subtitle-ws'),
    startAnnotationWebsocket: record('annotation-ws'),
    startTexthooker: record('texthooker'),
    log: record('log'),
    createMecabTokenizerAndCheck: asyncNoop,
    createSubtitleTimingTracker: record('subtitle-timing'),
    createImmersionTracker: record('immersion'),
    startJellyfinRemoteSession: asyncNoop,
    loadYomitanExtension: recordAsync('load-yomitan'),
    handleFirstRunSetup: recordAsync('first-run'),
    prewarmSubtitleDictionaries: asyncNoop,
    startBackgroundWarmups: record('warmups'),
    texthookerOnlyMode: false,
    shouldAutoInitializeOverlayRuntimeFromConfig: () => true,
    setVisibleOverlayVisible: record('visible-overlay'),
    initializeOverlayRuntime: record('init-overlay'),
    handleInitialArgs: record('handle-initial-args'),
    shouldUseMinimalStartup: () => false,
    shouldSkipHeavyStartup: () => false,
  });
  // Only relative ordering matters here: Yomitan must be loaded strictly
  // before the overlay runtime is initialized.
  assert.ok(events.includes('load-yomitan'));
  assert.ok(events.includes('init-overlay'));
  assert.ok(events.indexOf('load-yomitan') < events.indexOf('init-overlay'));
});

View File

@@ -194,6 +194,7 @@ export async function runAppReadyRuntime(deps: AppReadyRuntimeDeps): Promise<voi
} else { } else {
deps.createMpvClient(); deps.createMpvClient();
deps.createSubtitleTimingTracker(); deps.createSubtitleTimingTracker();
await deps.loadYomitanExtension();
deps.initializeOverlayRuntime(); deps.initializeOverlayRuntime();
deps.handleInitialArgs(); deps.handleInitialArgs();
} }
@@ -290,13 +291,14 @@ export async function runAppReadyRuntime(deps: AppReadyRuntimeDeps): Promise<voi
if (deps.texthookerOnlyMode) { if (deps.texthookerOnlyMode) {
deps.log('Texthooker-only mode enabled; skipping overlay window.'); deps.log('Texthooker-only mode enabled; skipping overlay window.');
} else if (deps.shouldAutoInitializeOverlayRuntimeFromConfig()) { } else if (deps.shouldAutoInitializeOverlayRuntimeFromConfig()) {
await deps.loadYomitanExtension();
deps.setVisibleOverlayVisible(true); deps.setVisibleOverlayVisible(true);
deps.initializeOverlayRuntime(); deps.initializeOverlayRuntime();
} else { } else {
deps.log('Overlay runtime deferred: waiting for explicit overlay command.'); deps.log('Overlay runtime deferred: waiting for explicit overlay command.');
await deps.loadYomitanExtension();
} }
await deps.loadYomitanExtension();
await deps.handleFirstRunSetup(); await deps.handleFirstRunSetup();
deps.handleInitialArgs(); deps.handleInitialArgs();
deps.logDebug?.(`App-ready critical path finished in ${now() - startupStartedAtMs}ms.`); deps.logDebug?.(`App-ready critical path finished in ${now() - startupStartedAtMs}ms.`);

View File

@@ -1,7 +1,7 @@
import assert from 'node:assert/strict'; import assert from 'node:assert/strict';
import test from 'node:test'; import test from 'node:test';
import { parseSrtCues, parseAssCues, parseSubtitleCues } from './subtitle-cue-parser'; import { parseSrtCues, parseAssCues, parseSubtitleCues } from './subtitle-cue-parser';
import type { SubtitleCue } from './subtitle-cue-parser'; import type { SubtitleCue } from '../../types';
test('parseSrtCues parses basic SRT content', () => { test('parseSrtCues parses basic SRT content', () => {
const content = [ const content = [

View File

@@ -183,7 +183,13 @@ export function parseSubtitleCues(content: string, filename: string): SubtitleCu
cues = parseAssCues(content); cues = parseAssCues(content);
break; break;
default: default:
return []; cues = [];
}
if (cues.length === 0) {
const assCues = parseAssCues(content);
const srtCues = parseSrtCues(content);
cues = assCues.length >= srtCues.length ? assCues : srtCues;
} }
cues.sort((a, b) => a.startTime - b.startTime); cues.sort((a, b) => a.startTime - b.startTime);

View File

@@ -1,8 +1,8 @@
import assert from 'node:assert/strict'; import assert from 'node:assert/strict';
import test from 'node:test'; import test from 'node:test';
import { computePriorityWindow, createSubtitlePrefetchService } from './subtitle-prefetch'; import { computePriorityWindow, createSubtitlePrefetchService } from './subtitle-prefetch';
import type { SubtitleCue } from './subtitle-cue-parser';
import type { SubtitleData } from '../../types'; import type { SubtitleData } from '../../types';
import type { SubtitleCue } from '../../types';
function makeCues(count: number, startOffset = 0): SubtitleCue[] { function makeCues(count: number, startOffset = 0): SubtitleCue[] {
return Array.from({ length: count }, (_, i) => ({ return Array.from({ length: count }, (_, i) => ({

View File

@@ -1,5 +1,5 @@
import type { SubtitleCue } from './subtitle-cue-parser';
import type { SubtitleData } from '../../types'; import type { SubtitleData } from '../../types';
import type { SubtitleCue } from '../../types';
export interface SubtitlePrefetchServiceDeps { export interface SubtitlePrefetchServiceDeps {
cues: SubtitleCue[]; cues: SubtitleCue[];

View File

@@ -0,0 +1,25 @@
import type { YoutubeFlowMode } from '../../../types';
import type { YoutubeTrackOption } from './track-probe';
import { downloadYoutubeSubtitleTrack, downloadYoutubeSubtitleTracks } from './track-download';
/**
 * Reports whether the YouTube subtitle flow is in the 'generate' mode.
 * Any other mode value is treated as non-generation.
 */
export function isYoutubeGenerationMode(mode: YoutubeFlowMode): boolean {
  const generationMode = 'generate';
  return mode === generationMode;
}
/**
 * Acquires a single YouTube subtitle track, resolving to the local file path.
 * Currently delegates straight to downloadYoutubeSubtitleTrack; note that
 * `mode` is accepted but not branched on here — the downloader receives it.
 */
export async function acquireYoutubeSubtitleTrack(input: {
  targetUrl: string;
  outputDir: string;
  track: YoutubeTrackOption;
  mode: YoutubeFlowMode;
}): Promise<{ path: string }> {
  const downloaded = await downloadYoutubeSubtitleTrack(input);
  return downloaded;
}
/**
 * Acquires several YouTube subtitle tracks at once, resolving to a map from
 * track identifier to local file path (as produced by the downloader).
 * Currently delegates straight to downloadYoutubeSubtitleTracks; `mode` is
 * passed through unchanged.
 */
export async function acquireYoutubeSubtitleTracks(input: {
  targetUrl: string;
  outputDir: string;
  tracks: YoutubeTrackOption[];
  mode: YoutubeFlowMode;
}): Promise<Map<string, string>> {
  const downloaded = await downloadYoutubeSubtitleTracks(input);
  return downloaded;
}

View File

@@ -0,0 +1 @@
/**
 * Category of a YouTube subtitle track: 'manual' or 'auto'.
 * NOTE(review): presumably 'manual' = uploader-provided captions and
 * 'auto' = auto-generated captions — confirm against the track-probe code.
 */
export type YoutubeTrackKind = 'manual' | 'auto';

View File

@@ -0,0 +1,41 @@
import type { YoutubeTrackKind } from './kinds';
export type { YoutubeTrackKind };
/**
 * Canonicalizes a language code/label for comparison: trims whitespace,
 * lowercases, turns underscores into hyphens, and drops every character
 * outside [a-z0-9-].
 */
export function normalizeYoutubeLangCode(value: string): string {
  const lowered = value.trim().toLowerCase();
  const hyphenated = lowered.replace(/_/g, '-');
  return hyphenated.replace(/[^a-z0-9-]+/g, '');
}
/**
 * Whether a raw language code/label refers to Japanese after normalization
 * (exact codes 'ja'/'jp'/'jpn'/'japanese', or any 'ja-'/'jp-' region variant).
 */
export function isJapaneseYoutubeLang(value: string): boolean {
  const code = normalizeYoutubeLangCode(value);
  if (code.startsWith('ja-') || code.startsWith('jp-')) {
    return true;
  }
  return ['ja', 'jp', 'jpn', 'japanese'].includes(code);
}
/**
 * Whether a raw language code/label refers to English after normalization
 * (exact codes 'en'/'eng'/'english'/'enus', or any 'en-' region variant,
 * which also covers the original explicit 'en-us' case).
 */
export function isEnglishYoutubeLang(value: string): boolean {
  const code = normalizeYoutubeLangCode(value);
  if (code.startsWith('en-')) {
    return true;
  }
  return ['en', 'eng', 'english', 'enus'].includes(code);
}
/**
 * Builds a display label of the form "<title-or-language> (<kind>)".
 * A blank language falls back to 'unknown'; a non-blank trimmed title takes
 * precedence over the language for the label base.
 */
export function formatYoutubeTrackLabel(input: {
  language: string;
  kind: YoutubeTrackKind;
  title?: string;
}): string {
  const trimmedLanguage = input.language.trim();
  const languageOrUnknown = trimmedLanguage === '' ? 'unknown' : trimmedLanguage;
  const trimmedTitle = input.title?.trim();
  const base = trimmedTitle ? trimmedTitle : languageOrUnknown;
  return `${base} (${input.kind})`;
}

View File

@@ -0,0 +1,49 @@
import assert from 'node:assert/strict';
import fs from 'node:fs';
import os from 'node:os';
import path from 'node:path';
import test from 'node:test';
import { probeYoutubeVideoMetadata } from './metadata-probe';
/**
 * Runs `fn` with a fresh temporary directory and removes the directory
 * (recursively, ignoring missing paths) once `fn` settles.
 */
async function withTempDir<T>(fn: (dir: string) => Promise<T>): Promise<T> {
  const prefix = path.join(os.tmpdir(), 'subminer-youtube-metadata-probe-');
  const dir = fs.mkdtempSync(prefix);
  try {
    const result = await fn(dir);
    return result;
  } finally {
    // Always clean up, even when fn throws/rejects.
    fs.rmSync(dir, { recursive: true, force: true });
  }
}
/**
 * Writes a fake `yt-dlp` executable into `dir` that prints `payload` to
 * stdout. On POSIX the script gets exec permissions; a `.cmd` shim is always
 * written so Windows PATH lookup can find it too.
 */
function makeFakeYtDlpScript(dir: string, payload: string): void {
  const scriptPath = path.join(dir, 'yt-dlp');
  const scriptBody = ['#!/usr/bin/env node', `process.stdout.write(${JSON.stringify(payload)});`, ''].join('\n');
  fs.writeFileSync(scriptPath, scriptBody, 'utf8');
  const isWindows = process.platform === 'win32';
  if (!isWindows) {
    fs.chmodSync(scriptPath, 0o755);
  }
  fs.writeFileSync(`${scriptPath}.cmd`, `@echo off\r\nnode "${scriptPath}"\r\n`, 'utf8');
}
/**
 * Runs `fn` with a fake `yt-dlp` (printing `payload`) prepended to PATH via a
 * temporary bin directory, restoring the original PATH afterwards.
 * NOTE(review): if PATH was originally unset, restoration sets it to '' rather
 * than deleting the variable — matches the original behavior.
 */
async function withFakeYtDlp<T>(payload: string, fn: () => Promise<T>): Promise<T> {
  return await withTempDir(async (root) => {
    const binDir = path.join(root, 'bin');
    fs.mkdirSync(binDir, { recursive: true });
    makeFakeYtDlpScript(binDir, payload);
    const savedPath = process.env.PATH ?? '';
    // Prepend so our fake shadows any real yt-dlp on the system.
    process.env.PATH = [binDir, savedPath].join(path.delimiter);
    try {
      return await fn();
    } finally {
      process.env.PATH = savedPath;
    }
  });
}
test('probeYoutubeVideoMetadata returns null on malformed yt-dlp JSON', async () => {
  // The fake yt-dlp emits unparseable output; the probe must swallow the
  // parse failure and report null instead of throwing.
  await withFakeYtDlp('not-json', async () => {
    const videoUrl = 'https://www.youtube.com/watch?v=abc123';
    const metadata = await probeYoutubeVideoMetadata(videoUrl);
    assert.equal(metadata, null);
  });
});

Some files were not shown because too many files have changed in this diff Show More