mirror of
https://github.com/ksyasuda/SubMiner.git
synced 2026-04-14 04:19:26 -07:00
Compare commits
6 Commits
v0.12.0-be
...
main
| Author | SHA1 | Date | |
|---|---|---|---|
|
7ac51cd5e9
|
|||
| 52bab1d611 | |||
|
49e46e6b9b
|
|||
|
c1c40c8d40
|
|||
|
c71482cb44
|
|||
| 05cf4a6fe5 |
67
.github/workflows/prerelease.yml
vendored
67
.github/workflows/prerelease.yml
vendored
@@ -32,9 +32,9 @@ jobs:
|
||||
node_modules
|
||||
stats/node_modules
|
||||
vendor/subminer-yomitan/node_modules
|
||||
key: ${{ runner.os }}-bun-${{ hashFiles('bun.lock', 'stats/bun.lock', 'vendor/subminer-yomitan/package-lock.json') }}
|
||||
key: ${{ runner.os }}-${{ runner.arch }}-bun-${{ hashFiles('bun.lock', 'stats/bun.lock', 'vendor/subminer-yomitan/package-lock.json') }}
|
||||
restore-keys: |
|
||||
${{ runner.os }}-bun-
|
||||
${{ runner.os }}-${{ runner.arch }}-bun-
|
||||
|
||||
- name: Install dependencies
|
||||
run: |
|
||||
@@ -50,6 +50,9 @@ jobs:
|
||||
- name: Test suite (source)
|
||||
run: bun run test:fast
|
||||
|
||||
- name: Environment suite
|
||||
run: bun run test:env
|
||||
|
||||
- name: Coverage suite (maintained source lane)
|
||||
run: bun run test:coverage:src
|
||||
|
||||
@@ -103,9 +106,9 @@ jobs:
|
||||
stats/node_modules
|
||||
vendor/texthooker-ui/node_modules
|
||||
vendor/subminer-yomitan/node_modules
|
||||
key: ${{ runner.os }}-bun-${{ hashFiles('bun.lock', 'stats/bun.lock', 'vendor/texthooker-ui/package.json', 'vendor/subminer-yomitan/package-lock.json') }}
|
||||
key: ${{ runner.os }}-${{ runner.arch }}-bun-${{ hashFiles('bun.lock', 'stats/bun.lock', 'vendor/texthooker-ui/package.json', 'vendor/subminer-yomitan/package-lock.json') }}
|
||||
restore-keys: |
|
||||
${{ runner.os }}-bun-
|
||||
${{ runner.os }}-${{ runner.arch }}-bun-
|
||||
|
||||
- name: Install dependencies
|
||||
run: |
|
||||
@@ -137,6 +140,7 @@ jobs:
|
||||
with:
|
||||
name: appimage
|
||||
path: release/*.AppImage
|
||||
if-no-files-found: error
|
||||
|
||||
build-macos:
|
||||
needs: [quality-gate]
|
||||
@@ -161,9 +165,9 @@ jobs:
|
||||
stats/node_modules
|
||||
vendor/texthooker-ui/node_modules
|
||||
vendor/subminer-yomitan/node_modules
|
||||
key: ${{ runner.os }}-bun-${{ hashFiles('bun.lock', 'stats/bun.lock', 'vendor/texthooker-ui/package.json', 'vendor/subminer-yomitan/package-lock.json') }}
|
||||
key: ${{ runner.os }}-${{ runner.arch }}-bun-${{ hashFiles('bun.lock', 'stats/bun.lock', 'vendor/texthooker-ui/package.json', 'vendor/subminer-yomitan/package-lock.json') }}
|
||||
restore-keys: |
|
||||
${{ runner.os }}-bun-
|
||||
${{ runner.os }}-${{ runner.arch }}-bun-
|
||||
|
||||
- name: Validate macOS signing/notarization secrets
|
||||
run: |
|
||||
@@ -212,6 +216,7 @@ jobs:
|
||||
path: |
|
||||
release/*.dmg
|
||||
release/*.zip
|
||||
if-no-files-found: error
|
||||
|
||||
build-windows:
|
||||
needs: [quality-gate]
|
||||
@@ -236,9 +241,9 @@ jobs:
|
||||
stats/node_modules
|
||||
vendor/texthooker-ui/node_modules
|
||||
vendor/subminer-yomitan/node_modules
|
||||
key: ${{ runner.os }}-bun-${{ hashFiles('bun.lock', 'stats/bun.lock', 'vendor/texthooker-ui/package.json', 'vendor/subminer-yomitan/package-lock.json') }}
|
||||
key: ${{ runner.os }}-${{ runner.arch }}-bun-${{ hashFiles('bun.lock', 'stats/bun.lock', 'vendor/texthooker-ui/package.json', 'vendor/subminer-yomitan/package-lock.json') }}
|
||||
restore-keys: |
|
||||
${{ runner.os }}-bun-
|
||||
${{ runner.os }}-${{ runner.arch }}-bun-
|
||||
|
||||
- name: Install dependencies
|
||||
run: |
|
||||
@@ -304,9 +309,9 @@ jobs:
|
||||
path: |
|
||||
~/.bun/install/cache
|
||||
node_modules
|
||||
key: ${{ runner.os }}-bun-${{ hashFiles('bun.lock') }}
|
||||
key: ${{ runner.os }}-${{ runner.arch }}-bun-${{ hashFiles('bun.lock') }}
|
||||
restore-keys: |
|
||||
${{ runner.os }}-bun-
|
||||
${{ runner.os }}-${{ runner.arch }}-bun-
|
||||
|
||||
- name: Install dependencies
|
||||
run: bun install --frozen-lockfile
|
||||
@@ -339,7 +344,12 @@ jobs:
|
||||
echo "No release artifacts found for checksum generation."
|
||||
exit 1
|
||||
fi
|
||||
sha256sum "${files[@]}" > release/SHA256SUMS.txt
|
||||
: > release/SHA256SUMS.txt
|
||||
for file in "${files[@]}"; do
|
||||
printf '%s %s\n' \
|
||||
"$(sha256sum "$file" | awk '{print $1}')" \
|
||||
"${file##*/}" >> release/SHA256SUMS.txt
|
||||
done
|
||||
|
||||
- name: Get version from tag
|
||||
id: version
|
||||
@@ -354,20 +364,6 @@ jobs:
|
||||
run: |
|
||||
set -euo pipefail
|
||||
|
||||
if gh release view "${{ steps.version.outputs.VERSION }}" >/dev/null 2>&1; then
|
||||
gh release edit "${{ steps.version.outputs.VERSION }}" \
|
||||
--draft=false \
|
||||
--prerelease \
|
||||
--title "${{ steps.version.outputs.VERSION }}" \
|
||||
--notes-file release/prerelease-notes.md
|
||||
else
|
||||
gh release create "${{ steps.version.outputs.VERSION }}" \
|
||||
--latest=false \
|
||||
--prerelease \
|
||||
--title "${{ steps.version.outputs.VERSION }}" \
|
||||
--notes-file release/prerelease-notes.md
|
||||
fi
|
||||
|
||||
shopt -s nullglob
|
||||
artifacts=(
|
||||
release/*.AppImage
|
||||
@@ -384,6 +380,27 @@ jobs:
|
||||
exit 1
|
||||
fi
|
||||
|
||||
if gh release view "${{ steps.version.outputs.VERSION }}" >/dev/null 2>&1; then
|
||||
gh release edit "${{ steps.version.outputs.VERSION }}" \
|
||||
--draft \
|
||||
--prerelease \
|
||||
--title "${{ steps.version.outputs.VERSION }}" \
|
||||
--notes-file release/prerelease-notes.md
|
||||
else
|
||||
gh release create "${{ steps.version.outputs.VERSION }}" \
|
||||
--draft \
|
||||
--latest=false \
|
||||
--prerelease \
|
||||
--title "${{ steps.version.outputs.VERSION }}" \
|
||||
--notes-file release/prerelease-notes.md
|
||||
fi
|
||||
|
||||
for asset in "${artifacts[@]}"; do
|
||||
gh release upload "${{ steps.version.outputs.VERSION }}" "$asset" --clobber
|
||||
done
|
||||
|
||||
gh release edit "${{ steps.version.outputs.VERSION }}" \
|
||||
--draft=false \
|
||||
--prerelease \
|
||||
--title "${{ steps.version.outputs.VERSION }}" \
|
||||
--notes-file release/prerelease-notes.md
|
||||
|
||||
7
.github/workflows/release.yml
vendored
7
.github/workflows/release.yml
vendored
@@ -340,7 +340,12 @@ jobs:
|
||||
echo "No release artifacts found for checksum generation."
|
||||
exit 1
|
||||
fi
|
||||
sha256sum "${files[@]}" > release/SHA256SUMS.txt
|
||||
: > release/SHA256SUMS.txt
|
||||
for file in "${files[@]}"; do
|
||||
printf '%s %s\n' \
|
||||
"$(sha256sum "$file" | awk '{print $1}')" \
|
||||
"${file##*/}" >> release/SHA256SUMS.txt
|
||||
done
|
||||
|
||||
- name: Get version from tag
|
||||
id: version
|
||||
|
||||
38
CHANGELOG.md
38
CHANGELOG.md
@@ -1,5 +1,43 @@
|
||||
# Changelog
|
||||
|
||||
## v0.12.0 (2026-04-11)
|
||||
|
||||
### Changed
|
||||
- Overlay: Added configurable overlay shortcuts for session help, controller select, and controller debug actions.
|
||||
- Overlay: Added mpv/plugin and CLI routing for session help, controller utilities, and subtitle sidebar toggling through the shared session-action path.
|
||||
- Overlay: Improved dedicated overlay modal retry and focus handling for runtime options, Jimaku, session help, controller tools, and the playlist browser.
|
||||
- Overlay: Fixed controller configuration and controller debug shortcut opens so configured bindings bring up their modals again instead of tripping renderer recovery.
|
||||
- Stats: Sessions are rolled up per episode within each day, with a bulk delete that wipes every session in the group.
|
||||
- Stats: Trends add a 365-day range next to the existing 7d/30d/90d/all options.
|
||||
- Stats: Library detail view gets a delete-episode action that removes the video and all its sessions.
|
||||
- Stats: Vocabulary Top 50 tightens the word/reading column so katakana entries no longer push the scores off screen.
|
||||
- Stats: Episode detail hides card events whose Anki notes have been deleted, instead of showing phantom mining activity.
|
||||
- Stats: Trend and watch-time charts share a unified theme with horizontal gridlines and larger ticks for legibility.
|
||||
- Stats: Overview, Library, Trends, Sessions, and Vocabulary now use generic "title" wording so YouTube videos and anime live comfortably side by side in the dashboard.
|
||||
- Stats: Session timeline no longer plots seek-forward/seek-backward markers — they were too noisy on sessions with lots of rewinds.
|
||||
- Stats: Replaced the "Library — Per Day" section on the Stats → Trends page with a "Library — Summary" section. The new section shows a top-10 watch-time leaderboard chart and a sortable per-title table (watch time, videos, sessions, cards, words, lookups, lookups/100w, date range), all scoped to the current date range selector.
|
||||
|
||||
### Fixed
|
||||
- Overlay: Fixed overlay drag-and-drop routing so dropping external subtitle files like `.ass` onto mpv still loads them when the overlay is visible.
|
||||
- Overlay: Addressed the latest CodeRabbit follow-ups on PR #49, including generation-scoped Lua session binding names, stricter session command validation, session-help shortcut visibility, the numeric-selection key guard, stats-overlay startup classification, and safer session-binding persistence.
|
||||
- Overlay: Addressed the latest CodeRabbit follow-ups on the Windows overlay flow, including exact mpv target resolution, lower-overlay helper arguments, Win32 failure detection, and overlay cleanup on tracker loss.
|
||||
- Overlay: Fixed Windows overlay z-order so the visible subtitle overlay stops staying above unrelated apps after mpv loses focus.
|
||||
- Overlay: Fixed Windows overlay tracking to use native window polling and owner/z-order binding, which keeps the subtitle overlay aligned to the active mpv window more reliably.
|
||||
- Overlay: Fixed Windows overlay hide/restore behavior so minimizing mpv immediately hides the overlay and restoring mpv brings it back on top of the mpv window without requiring a click.
|
||||
- Overlay: Fixed stats overlay layering so the in-player stats page now stays above mpv and the subtitle overlay while it is open.
|
||||
- Overlay: Fixed Windows subtitle overlay stability so transient tracker misses and restore events keep the current subtitle visible instead of waiting for the next subtitle line.
|
||||
- Overlay: Fixed Windows focus handoff from the interactive subtitle overlay back to mpv so the overlay no longer drops behind mpv and briefly disappears.
|
||||
- Overlay: Fixed Windows visible-overlay startup so it no longer briefly opens as an interactive or opaque surface before the tracked transparent overlay state settles.
|
||||
- Overlay: Fixed spurious auto-pause after overlay visibility recovery and window resize so the overlay no longer pauses mpv until the pointer genuinely re-enters the subtitle area.
|
||||
- Overlay: Fixed Windows secondary subtitle hover mode so the expanded hover hit area no longer blocks the native minimize, maximize, and close buttons.
|
||||
- Overlay: Fixed Windows Yomitan popup focus loss after closing nested lookups so the original popup stays interactive instead of falling through to mpv.
|
||||
- Stats: Fixed immersion-tracker timestamp handling under Bun/libsql so library rows, session timelines, and lifetime summaries keep real wall-clock millisecond values instead of truncating to invalid negative timestamps.
|
||||
- Mpv Plugin: Fixed the mpv Lua plugin so hover and environment modules no longer use the `goto continue` pattern that can fail to parse on some user Lua runtimes.
|
||||
|
||||
### Internal
|
||||
- Release: Added a dedicated beta/rc prerelease GitHub Actions workflow that publishes GitHub prereleases without consuming pending changelog fragments or updating AUR.
|
||||
- Release: Added prerelease note generation so beta and release-candidate tags can reuse the current pending `changes/*.md` fragments while leaving stable changelog publication for the final release cut.
|
||||
|
||||
## v0.11.2 (2026-04-07)
|
||||
|
||||
### Changed
|
||||
|
||||
@@ -0,0 +1,34 @@
|
||||
---
|
||||
id: TASK-285
|
||||
title: Rename anime visibility filter heading to title visibility
|
||||
status: Done
|
||||
assignee:
|
||||
- codex
|
||||
created_date: '2026-04-10 00:00'
|
||||
updated_date: '2026-04-10 00:00'
|
||||
labels:
|
||||
- stats
|
||||
- ui
|
||||
- bug
|
||||
milestone: m-1
|
||||
dependencies: []
|
||||
references:
|
||||
- stats/src/components/trends/TrendsTab.tsx
|
||||
- stats/src/components/trends/TrendsTab.test.tsx
|
||||
priority: low
|
||||
ordinal: 200000
|
||||
---
|
||||
|
||||
## Description
|
||||
|
||||
<!-- SECTION:DESCRIPTION:BEGIN -->
|
||||
Align the library cumulative trends filter UI with the new terminology by renaming the hardcoded anime visibility heading to title visibility.
|
||||
<!-- SECTION:DESCRIPTION:END -->
|
||||
|
||||
## Acceptance Criteria
|
||||
|
||||
<!-- AC:BEGIN -->
|
||||
- [x] #1 The trends filter heading uses `Title Visibility`
|
||||
- [x] #2 The component behavior and props stay unchanged
|
||||
- [x] #3 A regression test covers the rendered heading text
|
||||
<!-- AC:END -->
|
||||
@@ -0,0 +1,63 @@
|
||||
---
|
||||
id: TASK-286
|
||||
title: 'Assess and address PR #49 CodeRabbit review follow-ups'
|
||||
status: Done
|
||||
assignee:
|
||||
- codex
|
||||
created_date: '2026-04-11 18:55'
|
||||
updated_date: '2026-04-11 22:40'
|
||||
labels:
|
||||
- bug
|
||||
- code-review
|
||||
- windows
|
||||
- overlay
|
||||
dependencies: []
|
||||
references:
|
||||
- src/main/runtime/config-hot-reload-handlers.ts
|
||||
- src/renderer/handlers/keyboard.ts
|
||||
- src/renderer/handlers/mouse.ts
|
||||
- vendor/subminer-yomitan
|
||||
priority: high
|
||||
---
|
||||
|
||||
## Description
|
||||
|
||||
<!-- SECTION:DESCRIPTION:BEGIN -->
|
||||
Track the current PR #49 review round and resolve the actionable CodeRabbit findings on the Windows update branch.
|
||||
|
||||
Focus areas include the renderer mouse interaction fix, config hot-reload keyboard state, and any other review items that still apply after verifying the current branch state.
|
||||
<!-- SECTION:DESCRIPTION:END -->
|
||||
|
||||
## Acceptance Criteria
|
||||
<!-- AC:BEGIN -->
|
||||
- [x] #1 All actionable CodeRabbit comments on PR #49 are either fixed or shown to be obsolete with evidence.
|
||||
- [x] #2 Regression tests are added or updated for any behavior change that could regress.
|
||||
- [x] #3 The branch passes the repo's relevant verification checks for the touched areas.
|
||||
<!-- AC:END -->
|
||||
|
||||
## Implementation Plan
|
||||
|
||||
<!-- SECTION:PLAN:BEGIN -->
|
||||
1. Pull the current unresolved CodeRabbit review threads for PR #49 and cluster them into still-actionable fixes versus obsolete/nit-only items.
|
||||
2. For each still-actionable behavior bug, add or extend the narrowest failing test first in the touched suite before changing production code.
|
||||
3. Implement the minimal fixes across the affected runtime, renderer, plugin, IPC, and Windows tracker files, keeping each change traceable to the review thread.
|
||||
4. Run targeted verification for the touched areas, update task notes with assessment results, and capture which review comments were fixed versus assessed as obsolete or deferred nitpicks.
|
||||
<!-- SECTION:PLAN:END -->
|
||||
|
||||
## Implementation Notes
|
||||
|
||||
<!-- SECTION:NOTES:BEGIN -->
|
||||
Assessed PR #49 CodeRabbit threads. Fixed the real regressions in first-run CLI gating, IPC session-action validation, renderer controller-modal lifecycle notifications, async subtitle-sidebar toggle guarding, plugin config-dir resolution priority, prerelease artifact upload failure handling, immersion tracker lazy startup, win32 z-order error handling, and Windows socket-aware mpv matching.
|
||||
|
||||
Review assessment: the overlay-shortcut lifecycle comment is obsolete for the current architecture because overlay-local shortcuts are intentionally handled through the local fallback path and the runtime only tracks configured-state/cleanup. Refactor-only nit comments for splitting `scripts/build-changelog.ts` and `src/core/services/session-bindings.ts` were left as follow-up quality work, not behavior bugs in this PR round.
|
||||
|
||||
Verification: `bun test src/main/runtime/first-run-setup-service.test.ts src/core/services/session-bindings.test.ts src/core/services/app-ready.test.ts src/core/services/ipc.test.ts src/renderer/handlers/keyboard.test.ts src/main/overlay-runtime.test.ts src/window-trackers/mpv-socket-match.test.ts`, `bun test src/window-trackers/windows-tracker.test.ts`, `bun run typecheck`, `lua scripts/test-plugin-lua-compat.lua`.
|
||||
<!-- SECTION:NOTES:END -->
|
||||
|
||||
## Final Summary
|
||||
|
||||
<!-- SECTION:FINAL_SUMMARY:BEGIN -->
|
||||
Assessed the current CodeRabbit round on PR #49 and addressed the still-valid behavior issues rather than blanket-applying every bot suggestion. The branch now treats the new session/stats CLI flags as explicit startup commands during first-run setup, validates the new session actions through IPC, points session-binding command diagnostics at the correct config field, keeps immersion tracker startup lazy until later runtime triggers, and notifies overlay modal lifecycle state when controller-select/debug are opened from local keyboard bindings. I also switched the subtitle-sidebar IPC callback to the async guarded path so promise rejections feed renderer recovery instead of being dropped.
|
||||
|
||||
On the Windows/plugin side, the mpv plugin now prefers config-file matches before falling back to an existing config directory, prerelease workflow uploads fail if expected Linux/macOS artifacts are missing, the Win32 z-order bind path now validates the `GetWindowLongW` call for the window above mpv, and the Windows tracker now passes the target socket path into native polling and filters mpv instances by command line so multiple sockets can be distinguished on Windows. Added/updated regression coverage for first-run gating, IPC validation, session-binding diagnostics, controller modal lifecycle notifications, modal ready-listener dispatch, and socket-path matching. Verification run: `bun run typecheck`, the targeted Bun test suites for the touched areas, `bun test src/window-trackers/windows-tracker.test.ts`, and `lua scripts/test-plugin-lua-compat.lua`.
|
||||
<!-- SECTION:FINAL_SUMMARY:END -->
|
||||
@@ -0,0 +1,61 @@
|
||||
---
|
||||
id: TASK-286.1
|
||||
title: 'Assess and address PR #49 subsequent CodeRabbit review round'
|
||||
status: Done
|
||||
assignee: []
|
||||
created_date: '2026-04-11 23:14'
|
||||
updated_date: '2026-04-11 23:16'
|
||||
labels:
|
||||
- bug
|
||||
- code-review
|
||||
- windows
|
||||
- release
|
||||
dependencies: []
|
||||
references:
|
||||
- .github/workflows/prerelease.yml
|
||||
- src/window-trackers/mpv-socket-match.ts
|
||||
- src/window-trackers/win32.ts
|
||||
- src/core/services/overlay-shortcut.ts
|
||||
parent_task_id: TASK-286
|
||||
priority: high
|
||||
---
|
||||
|
||||
## Description
|
||||
|
||||
<!-- SECTION:DESCRIPTION:BEGIN -->
|
||||
Track the next unresolved CodeRabbit review threads on PR #49 after commit 9ce5de2f and resolve the still-valid follow-up issues without reopening already-assessed stale comments.
|
||||
<!-- SECTION:DESCRIPTION:END -->
|
||||
|
||||
## Acceptance Criteria
|
||||
<!-- AC:BEGIN -->
|
||||
- [x] #1 All still-actionable CodeRabbit comments in the latest PR #49 round are fixed or explicitly shown stale with evidence.
|
||||
- [x] #2 Regression coverage is added or updated for any behavior-sensitive fix in workflow or Windows socket matching.
|
||||
- [x] #3 Relevant verification passes for the touched workflow, tracker, and shared matcher changes.
|
||||
<!-- AC:END -->
|
||||
|
||||
## Implementation Plan
|
||||
|
||||
<!-- SECTION:PLAN:BEGIN -->
|
||||
1. Verify the five unresolved CodeRabbit threads against current branch state and separate still-valid bugs from stale comments.
|
||||
2. Add or extend the narrowest failing tests for exact socket-path matching and prerelease workflow invariants before changing production code.
|
||||
3. Implement minimal fixes in the prerelease workflow and Windows socket matching/cache path, leaving stale comments documented with evidence instead of forcing no-op changes.
|
||||
4. Run targeted verification, then record the fixed-vs-stale assessment and close the subtask.
|
||||
<!-- SECTION:PLAN:END -->
|
||||
|
||||
## Implementation Notes
|
||||
|
||||
<!-- SECTION:NOTES:BEGIN -->
|
||||
Assessed five unresolved PR #49 threads after 9ce5de2f. Fixed prerelease workflow cache keys to include `runner.arch`, changed prerelease publishing to validate artifacts before release creation/edit and only undraft after uploads complete, tightened Windows socket matching to require exact argument boundaries, and stopped memoizing null command-line lookup misses in the Win32 cache path.
|
||||
|
||||
Stale assessment: the `src/core/services/overlay-shortcut.ts` thread is still obsolete. Current code at `registerOverlayShortcuts()` returns `hasConfiguredOverlayShortcuts(shortcuts)`, not `false`, and the overlay-local handling remains intentionally driven by local fallback dispatch rather than global registration in this runtime path.
|
||||
|
||||
Verification: `bun test src/prerelease-workflow.test.ts src/window-trackers/mpv-socket-match.test.ts`, `bun test src/window-trackers/windows-tracker.test.ts src/prerelease-workflow.test.ts src/window-trackers/mpv-socket-match.test.ts`, `bun run typecheck`.
|
||||
<!-- SECTION:NOTES:END -->
|
||||
|
||||
## Final Summary
|
||||
|
||||
<!-- SECTION:FINAL_SUMMARY:BEGIN -->
|
||||
Handled the next CodeRabbit round on PR #49 by fixing the still-valid prerelease workflow and Windows socket-matching issues while documenting the stale overlay-shortcut comment instead of forcing a no-op code change. The prerelease workflow now scopes all dependency caches by `runner.arch`, validates the final artifact set before touching the GitHub release, creates/edits the prerelease as a draft during uploads, and only flips `--draft=false` after all assets succeed. On Windows, socket matching now requires an exact `--input-ipc-server` argument boundary so `subminer-1` no longer matches `subminer-10`, and transient PowerShell/CIM misses no longer get cached forever as null command lines.
|
||||
|
||||
Regression coverage was added for the workflow invariants and exact socket matching. Verification passed with targeted prerelease workflow tests, Windows tracker tests, socket-matcher tests, and `bun run typecheck`.
|
||||
<!-- SECTION:FINAL_SUMMARY:END -->
|
||||
@@ -0,0 +1,49 @@
|
||||
---
|
||||
id: TASK-286.2
|
||||
title: 'Assess and address PR #49 next CodeRabbit review round'
|
||||
status: Done
|
||||
assignee: []
|
||||
created_date: '2026-04-12 02:50'
|
||||
updated_date: '2026-04-12 02:52'
|
||||
labels:
|
||||
- bug
|
||||
- code-review
|
||||
- release
|
||||
- testing
|
||||
dependencies: []
|
||||
references:
|
||||
- .github/workflows/prerelease.yml
|
||||
- src/prerelease-workflow.test.ts
|
||||
- src/core/services/overlay-shortcut.ts
|
||||
parent_task_id: TASK-286
|
||||
priority: high
|
||||
---
|
||||
|
||||
## Description
|
||||
|
||||
<!-- SECTION:DESCRIPTION:BEGIN -->
|
||||
Track the next unresolved CodeRabbit review threads on PR #49 after commit 62ad77dc and resolve the still-valid follow-up issues while documenting stale repeats.
|
||||
<!-- SECTION:DESCRIPTION:END -->
|
||||
|
||||
## Acceptance Criteria
|
||||
<!-- AC:BEGIN -->
|
||||
- [x] #1 All still-actionable CodeRabbit comments in the latest PR #49 round are fixed or explicitly shown stale with evidence.
|
||||
- [x] #2 Regression coverage is updated for any workflow or test changes made in this round.
|
||||
- [x] #3 Relevant verification passes for the touched workflow and prerelease test changes.
|
||||
<!-- AC:END -->
|
||||
|
||||
## Implementation Notes
|
||||
|
||||
<!-- SECTION:NOTES:BEGIN -->
|
||||
Assessed latest unresolved CodeRabbit round on PR #49. `src/core/services/overlay-shortcut.ts` comment is stale: `registerOverlayShortcuts()` returns `hasConfiguredOverlayShortcuts(shortcuts)`, so runtime registration is not hard-coded false.
|
||||
|
||||
Added exact, line-ending-agnostic prerelease tag trigger assertions in `src/prerelease-workflow.test.ts` and a regression asserting `bun run test:env` sits in the prerelease quality gate before source coverage.
|
||||
|
||||
Updated `.github/workflows/prerelease.yml` quality-gate to run `bun run test:env` after `bun run test:fast`.
|
||||
<!-- SECTION:NOTES:END -->
|
||||
|
||||
## Final Summary
|
||||
|
||||
<!-- SECTION:FINAL_SUMMARY:BEGIN -->
|
||||
Assessed the latest CodeRabbit round for PR #49. Left the `overlay-shortcut.ts` thread open as stale with code evidence, tightened prerelease workflow trigger coverage, and added the missing `test:env` step to the prerelease quality gate. Verification: `bun test src/prerelease-workflow.test.ts`; `bun run typecheck`.
|
||||
<!-- SECTION:FINAL_SUMMARY:END -->
|
||||
@@ -0,0 +1,48 @@
|
||||
---
|
||||
id: TASK-286.3
|
||||
title: 'Assess and address PR #49 latest CodeRabbit review round'
|
||||
status: Done
|
||||
assignee: []
|
||||
created_date: '2026-04-12 03:08'
|
||||
updated_date: '2026-04-12 03:09'
|
||||
labels:
|
||||
- bug
|
||||
- code-review
|
||||
- testing
|
||||
dependencies: []
|
||||
references:
|
||||
- 'PR #49'
|
||||
- .github/workflows/prerelease.yml
|
||||
- src
|
||||
parent_task_id: TASK-286
|
||||
priority: high
|
||||
---
|
||||
|
||||
## Description
|
||||
|
||||
<!-- SECTION:DESCRIPTION:BEGIN -->
|
||||
Track the newest unresolved CodeRabbit review threads on PR #49 after commit 942c1649, fix the still-valid issues, verify them, and push the branch update.
|
||||
<!-- SECTION:DESCRIPTION:END -->
|
||||
|
||||
## Acceptance Criteria
|
||||
<!-- AC:BEGIN -->
|
||||
- [x] #1 All still-actionable CodeRabbit comments in the newest PR #49 round are fixed or explicitly identified stale with evidence.
|
||||
- [x] #2 Regression coverage is added or updated for behavior touched in this round.
|
||||
- [x] #3 Relevant verification passes before commit and push.
|
||||
<!-- AC:END -->
|
||||
|
||||
## Implementation Notes
|
||||
|
||||
<!-- SECTION:NOTES:BEGIN -->
|
||||
Fetched the newest unresolved CodeRabbit threads for PR #49 after commit `942c1649`; only one unresolved actionable thread remained, on prerelease checksum output using repo-relative paths instead of asset basenames.
|
||||
|
||||
Added regression coverage in `src/prerelease-workflow.test.ts` and `src/release-workflow.test.ts` asserting checksum generation truncates to asset basenames and no longer writes the raw `sha256sum "${files[@]}" > release/SHA256SUMS.txt` form.
|
||||
|
||||
Updated both `.github/workflows/prerelease.yml` and `.github/workflows/release.yml` checksum generation steps to iterate over the `files` array and write `SHA256 basename` lines into `release/SHA256SUMS.txt`.
|
||||
<!-- SECTION:NOTES:END -->
|
||||
|
||||
## Final Summary
|
||||
|
||||
<!-- SECTION:FINAL_SUMMARY:BEGIN -->
|
||||
Resolved the latest CodeRabbit round for PR #49 by fixing checksum generation to emit basename-oriented `SHA256SUMS.txt` entries in both prerelease and release workflows, with matching regression coverage. Verification: `bun test src/prerelease-workflow.test.ts src/release-workflow.test.ts`; `bun run typecheck`.
|
||||
<!-- SECTION:FINAL_SUMMARY:END -->
|
||||
@@ -0,0 +1,44 @@
|
||||
---
|
||||
id: TASK-287
|
||||
title: Restore Lua parser compatibility for mpv plugin modules
|
||||
status: Done
|
||||
assignee: []
|
||||
created_date: '2026-04-11 21:25'
|
||||
updated_date: '2026-04-11 21:29'
|
||||
labels:
|
||||
- bug
|
||||
- mpv-plugin
|
||||
- lua
|
||||
dependencies: []
|
||||
---
|
||||
|
||||
## Description
|
||||
|
||||
<!-- SECTION:DESCRIPTION:BEGIN -->
|
||||
Users with Lua runtimes that do not accept the current `goto continue` pattern in the mpv plugin should be able to load the plugin without syntax errors. Remove the parser-incompatible control-flow usage from the affected plugin modules without changing plugin behavior.
|
||||
<!-- SECTION:DESCRIPTION:END -->
|
||||
|
||||
## Acceptance Criteria
|
||||
<!-- AC:BEGIN -->
|
||||
- [x] #1 The mpv plugin source no longer relies on parser-incompatible `goto continue` labels in the affected Lua modules.
|
||||
- [x] #2 Automated coverage fails on the old parser-incompatible source and passes once the compatibility fix is in place.
|
||||
- [x] #3 Existing plugin start/gate verification still passes after the compatibility fix.
|
||||
<!-- AC:END -->
|
||||
|
||||
## Implementation Notes
|
||||
|
||||
<!-- SECTION:NOTES:BEGIN -->
|
||||
Reused existing local cleanups in `plugin/subminer/hover.lua` and `plugin/subminer/environment.lua` to remove `goto continue` / `::continue::` control flow without behavior changes.
|
||||
|
||||
Added `scripts/test-plugin-lua-compat.lua` and wired it into `test:plugin:src`; the regression checks reject the legacy pattern structurally and verify parse success with `luajit` when available.
|
||||
|
||||
Verification run on 2026-04-11: `lua scripts/test-plugin-lua-compat.lua` ✅, `bun run test:plugin:src` ✅, `bun run changelog:lint` ✅, `bun run typecheck` ✅, `bun run test:env` ✅, `bun run build` ✅, `bun run test:smoke:dist` ✅.
|
||||
|
||||
`bun run test:fast` remains red for unrelated existing immersion-tracker assertions in `src/core/services/immersion-tracker/__tests__/query-split-modules.test.ts` and `src/core/services/immersion-tracker/__tests__/query.test.ts` (`tsMs`/`lastWatchedMs` observed as `-2147483648`).
|
||||
<!-- SECTION:NOTES:END -->
|
||||
|
||||
## Final Summary
|
||||
|
||||
<!-- SECTION:FINAL_SUMMARY:BEGIN -->
|
||||
Removed parser-incompatible `goto continue` usage from the affected mpv Lua plugin modules, added a dedicated Lua compatibility regression script to the plugin test lane, and added a changelog fragment for the user-visible fix. Requested plugin verification is green; unrelated existing `test:fast` immersion-tracker failures remain outside this task.
|
||||
<!-- SECTION:FINAL_SUMMARY:END -->
|
||||
@@ -0,0 +1,42 @@
|
||||
---
|
||||
id: TASK-288
|
||||
title: Stabilize immersion-tracker CI timestamp handling under libsql/Bun
|
||||
status: Done
|
||||
assignee: []
|
||||
created_date: '2026-04-11 21:34'
|
||||
updated_date: '2026-04-11 21:43'
|
||||
labels:
|
||||
- bug
|
||||
- ci
|
||||
- immersion-tracker
|
||||
dependencies: []
|
||||
---
|
||||
|
||||
## Description
|
||||
|
||||
<!-- SECTION:DESCRIPTION:BEGIN -->
|
||||
`bun run test:fast` is currently failing because large millisecond timestamps are not handled safely through the libsql/Bun path. Fix timestamp parsing/storage so lifetime/library and session-event queries return correct wall-clock values in CI and runtime.
|
||||
<!-- SECTION:DESCRIPTION:END -->
|
||||
|
||||
## Acceptance Criteria
|
||||
<!-- AC:BEGIN -->
|
||||
- [x] #1 Large wall-clock timestamps round-trip correctly through immersion-tracker lifetime/library queries under the repo's Bun/libsql runtime.
|
||||
- [x] #2 Session-event timestamps round-trip correctly for real wall-clock values used by runtime event inserts.
|
||||
- [x] #3 Targeted immersion-tracker regression coverage passes, and the previously failing `test:fast` lane no longer fails on these timestamp assertions.
|
||||
<!-- AC:END -->
|
||||
|
||||
## Implementation Notes
|
||||
|
||||
<!-- SECTION:NOTES:BEGIN -->
|
||||
Root cause split in two places: Bun/libsql corrupts large millisecond timestamp strings when coerced through `Number(...)`, and `imm_session_events.ts_ms` being `INTEGER` let runtime event inserts/readbacks return `-2147483648` on CI/runtime.
|
||||
|
||||
Fix shipped by parsing timestamp strings without the broken `Number(largeString)` path, migrating `imm_session_events.ts_ms` to `TEXT`, ordering/retention queries via `CAST(ts_ms AS REAL)`, and avoiding `Number(currentMs)` when reusing already-normalized timestamp strings.
|
||||
|
||||
Added regression coverage for both real runtime event inserts and schema migration/repair of previously truncated session-event rows.
|
||||
<!-- SECTION:NOTES:END -->
|
||||
|
||||
## Final Summary
|
||||
|
||||
<!-- SECTION:FINAL_SUMMARY:BEGIN -->
|
||||
Fixed immersion-tracker timestamp handling under Bun/libsql so large wall-clock millisecond values survive runtime writes, query reads, and schema migration. `bun run test:fast`, `bun run typecheck`, `bun run test:env`, `bun run build`, `bun run test:smoke:dist`, and `bun run changelog:lint` all pass after the patch.
|
||||
<!-- SECTION:FINAL_SUMMARY:END -->
|
||||
@@ -0,0 +1,33 @@
|
||||
---
|
||||
id: TASK-289
|
||||
title: Finish current windows-qol rebase
|
||||
status: Done
|
||||
assignee: []
|
||||
created_date: '2026-04-11 22:07'
|
||||
updated_date: '2026-04-11 22:08'
|
||||
labels:
|
||||
- maintenance
|
||||
- rebase
|
||||
dependencies: []
|
||||
references:
|
||||
- /home/sudacode/projects/japanese/SubMiner
|
||||
priority: medium
|
||||
---
|
||||
|
||||
## Description
|
||||
|
||||
<!-- SECTION:DESCRIPTION:BEGIN -->
|
||||
Resolve the in-progress rebase on `windows-qol` and ensure the branch lands cleanly.
|
||||
<!-- SECTION:DESCRIPTION:END -->
|
||||
|
||||
## Acceptance Criteria
|
||||
<!-- AC:BEGIN -->
|
||||
- [x] #1 Interactive rebase completes without conflicts.
|
||||
- [x] #2 Working tree is clean after the rebase finishes.
|
||||
<!-- AC:END -->
|
||||
|
||||
## Final Summary
|
||||
|
||||
<!-- SECTION:FINAL_SUMMARY:BEGIN -->
|
||||
Completed the interactive rebase on `windows-qol` and resolved the transient editor-blocked `git rebase --continue` step. Branch now rebased cleanly onto `49e46e6b`.
|
||||
<!-- SECTION:FINAL_SUMMARY:END -->
|
||||
@@ -0,0 +1,56 @@
|
||||
---
|
||||
id: TASK-290
|
||||
title: Cut stable release v0.12.0 on main
|
||||
status: Done
|
||||
assignee:
|
||||
- codex
|
||||
created_date: '2026-04-12 04:47'
|
||||
updated_date: '2026-04-12 04:51'
|
||||
labels: []
|
||||
dependencies: []
|
||||
documentation:
|
||||
- docs/RELEASING.md
|
||||
priority: high
|
||||
---
|
||||
|
||||
## Description
|
||||
|
||||
<!-- SECTION:DESCRIPTION:BEGIN -->
|
||||
Prepare the main branch for the stable SubMiner v0.12.0 release by applying the release-version updates, formatting changes required by the branch state, and rerunning the full release verification gate.
|
||||
<!-- SECTION:DESCRIPTION:END -->
|
||||
|
||||
## Acceptance Criteria
|
||||
<!-- AC:BEGIN -->
|
||||
- [x] #1 Main branch version and stable release metadata are updated for v0.12.0.
|
||||
- [x] #2 Required formatting changes for the release candidate tree are applied and verified.
|
||||
- [x] #3 The documented release verification gate passes locally and any remaining push or tag prerequisites are documented.
|
||||
<!-- AC:END -->
|
||||
|
||||
## Implementation Plan
|
||||
|
||||
<!-- SECTION:PLAN:BEGIN -->
|
||||
1. Audit main-branch release state: package version, release artifacts, current CI status, and current formatting debt.
|
||||
2. Apply required formatting fixes to the files reported by `bun run format:check:src` and verify the formatting lane passes.
|
||||
3. Update the package version to 0.12.0 and generate stable release metadata (`CHANGELOG.md`, `release/release-notes.md`, `docs-site/changelog.md`) using the documented release workflow.
|
||||
4. Run the full local release gate on main (`changelog:lint`, `changelog:check --version 0.12.0`, `verify:config-example`, `typecheck`, `test:fast`, `test:env`, `build`, `docs:test`, `docs:build`, plus dist smoke) and document any remaining tag/push prerequisites.
|
||||
<!-- SECTION:PLAN:END -->
|
||||
|
||||
## Implementation Notes
|
||||
|
||||
<!-- SECTION:NOTES:BEGIN -->
|
||||
Applied Prettier to all 39 files reported by `bun run format:check:src` on main and verified the formatting lane now passes.
|
||||
|
||||
Reapplied the stable changelog build entrypoint fix on main: added `writeStableReleaseArtifacts`, covered it with a focused regression test, and updated `package.json` so `changelog:build` forwards `--version` and `--date` through a single `build-release` command.
|
||||
|
||||
Verified the formatted mainline release tree with `bun run changelog:lint`, `bun run changelog:check --version 0.12.0`, `bun run verify:config-example`, `bun run typecheck`, `bun run test:fast`, `bun run test:env`, `bun run build`, `bun run docs:test`, `bun run docs:build`, and `bun run test:smoke:dist`; all passed.
|
||||
|
||||
Remote main CI also completed successfully for `Windows update (#49)` after the local release-prep pass. Remaining operational steps are commit/tag/push only.
|
||||
<!-- SECTION:NOTES:END -->
|
||||
|
||||
## Final Summary
|
||||
|
||||
<!-- SECTION:FINAL_SUMMARY:BEGIN -->
|
||||
Prepared `main` for the stable `v0.12.0` cut. Formatted the previously failing source files so `bun run format:check:src` is now clean, bumped `package.json` from `0.12.0-beta.3` to `0.12.0`, and generated the stable release artifacts with the explicit local cut date `2026-04-11`, which consumed the pending changelog fragments into `CHANGELOG.md`, `docs-site/changelog.md`, and `release/release-notes.md`.
|
||||
|
||||
Also reintroduced the release-script fix on main: the old `changelog:build` package script still used `build && docs`, which can drop `--version/--date` on the first step. Added a focused regression test in `scripts/build-changelog.test.ts`, implemented `writeStableReleaseArtifacts` in `scripts/build-changelog.ts`, and switched `package.json` to `build-release` so release flags propagate correctly. Verification on the final tree passed for formatting, changelog lint/check, config example verification, typecheck, fast tests, env tests, build, docs tests/build, dist smoke, and remote main CI. The branch is release-ready pending commit, tag, and push.
|
||||
<!-- SECTION:FINAL_SUMMARY:END -->
|
||||
@@ -1,5 +0,0 @@
|
||||
type: internal
|
||||
area: release
|
||||
|
||||
- Added a dedicated beta/rc prerelease GitHub Actions workflow that publishes GitHub prereleases without consuming pending changelog fragments or updating AUR.
|
||||
- Added prerelease note generation so beta and release-candidate tags can reuse the current pending `changes/*.md` fragments while leaving stable changelog publication for the final release cut.
|
||||
@@ -1,4 +0,0 @@
|
||||
type: fixed
|
||||
area: overlay
|
||||
|
||||
- Fixed overlay drag-and-drop routing so dropping external subtitle files like `.ass` onto mpv still loads them when the overlay is visible.
|
||||
@@ -1,4 +0,0 @@
|
||||
type: fixed
|
||||
area: overlay
|
||||
|
||||
- Addressed the latest CodeRabbit follow-ups on the Windows overlay flow, including exact mpv target resolution, lower-overlay helper arguments, Win32 failure detection, and overlay cleanup on tracker loss.
|
||||
@@ -1,11 +0,0 @@
|
||||
type: fixed
|
||||
area: overlay
|
||||
|
||||
- Fixed Windows overlay z-order so the visible subtitle overlay stops staying above unrelated apps after mpv loses focus.
|
||||
- Fixed Windows overlay tracking to use native window polling and owner/z-order binding, which keeps the subtitle overlay aligned to the active mpv window more reliably.
|
||||
- Fixed Windows overlay hide/restore behavior so minimizing mpv immediately hides the overlay and restoring mpv brings it back on top of the mpv window without requiring a click.
|
||||
- Fixed stats overlay layering so the in-player stats page now stays above mpv and the subtitle overlay while it is open.
|
||||
- Fixed Windows subtitle overlay stability so transient tracker misses and restore events keep the current subtitle visible instead of waiting for the next subtitle line.
|
||||
- Fixed Windows focus handoff from the interactive subtitle overlay back to mpv so the overlay no longer drops behind mpv and briefly disappears.
|
||||
- Fixed Windows visible-overlay startup so it no longer briefly opens as an interactive or opaque surface before the tracked transparent overlay state settles.
|
||||
- Fixed spurious auto-pause after overlay visibility recovery and window resize so the overlay no longer pauses mpv until the pointer genuinely re-enters the subtitle area.
|
||||
@@ -1,4 +0,0 @@
|
||||
type: fixed
|
||||
area: overlay
|
||||
|
||||
- Fixed Windows secondary subtitle hover mode so the expanded hover hit area no longer blocks the native minimize, maximize, and close buttons.
|
||||
@@ -1,4 +0,0 @@
|
||||
type: fixed
|
||||
area: overlay
|
||||
|
||||
- Fixed Windows Yomitan popup focus loss after closing nested lookups so the original popup stays interactive instead of falling through to mpv.
|
||||
@@ -1,6 +0,0 @@
|
||||
type: changed
|
||||
area: overlay
|
||||
|
||||
- Added configurable overlay shortcuts for session help, controller select, and controller debug actions.
|
||||
- Added mpv/plugin and CLI routing for session help, controller utilities, and subtitle sidebar toggling through the shared session-action path.
|
||||
- Improved dedicated overlay modal retry and focus handling for runtime options, Jimaku, session help, controller tools, and the playlist browser.
|
||||
@@ -177,7 +177,7 @@
|
||||
"openSessionHelp": "CommandOrControl+Shift+H", // Open session help setting.
|
||||
"openControllerSelect": "Alt+C", // Open controller select setting.
|
||||
"openControllerDebug": "Alt+Shift+C", // Open controller debug setting.
|
||||
"toggleSubtitleSidebar": "\\" // Toggle subtitle sidebar setting.
|
||||
"toggleSubtitleSidebar": "Backslash" // Toggle subtitle sidebar setting.
|
||||
}, // Overlay keyboard shortcuts. Set a shortcut to null to disable.
|
||||
|
||||
// ==========================================
|
||||
|
||||
@@ -1,6 +1,49 @@
|
||||
# Changelog
|
||||
|
||||
## v0.11.2 (2026-04-07)
|
||||
## v0.12.0 (2026-04-11)
|
||||
|
||||
**Changed**
|
||||
- Overlay: Added configurable overlay shortcuts for session help, controller select, and controller debug actions.
|
||||
- Overlay: Added mpv/plugin and CLI routing for session help, controller utilities, and subtitle sidebar toggling through the shared session-action path.
|
||||
- Overlay: Improved dedicated overlay modal retry and focus handling for runtime options, Jimaku, session help, controller tools, and the playlist browser.
|
||||
- Overlay: Fixed controller configuration and controller debug shortcut opens so configured bindings bring up their modals again instead of tripping renderer recovery.
|
||||
- Stats: Sessions are rolled up per episode within each day, with a bulk delete that wipes every session in the group.
|
||||
- Stats: Trends add a 365-day range next to the existing 7d/30d/90d/all options.
|
||||
- Stats: Library detail view gets a delete-episode action that removes the video and all its sessions.
|
||||
- Stats: Vocabulary Top 50 tightens the word/reading column so katakana entries no longer push the scores off screen.
|
||||
- Stats: Episode detail hides card events whose Anki notes have been deleted, instead of showing phantom mining activity.
|
||||
- Stats: Trend and watch-time charts share a unified theme with horizontal gridlines and larger ticks for legibility.
|
||||
- Stats: Overview, Library, Trends, Sessions, and Vocabulary now use generic "title" wording so YouTube videos and anime live comfortably side by side in the dashboard.
|
||||
- Stats: Session timeline no longer plots seek-forward/seek-backward markers — they were too noisy on sessions with lots of rewinds.
|
||||
- Stats: Replaced the "Library — Per Day" section on the Stats → Trends page with a "Library — Summary" section. The new section shows a top-10 watch-time leaderboard chart and a sortable per-title table (watch time, videos, sessions, cards, words, lookups, lookups/100w, date range), all scoped to the current date range selector.
|
||||
|
||||
**Fixed**
|
||||
- Overlay: Fixed overlay drag-and-drop routing so dropping external subtitle files like `.ass` onto mpv still loads them when the overlay is visible.
|
||||
- Overlay: Addressed the latest CodeRabbit follow-ups on PR #49, including generation-scoped Lua session binding names, stricter session command validation, session-help shortcut visibility, the numeric-selection key guard, stats-overlay startup classification, and safer session-binding persistence.
|
||||
- Overlay: Addressed the latest CodeRabbit follow-ups on the Windows overlay flow, including exact mpv target resolution, lower-overlay helper arguments, Win32 failure detection, and overlay cleanup on tracker loss.
|
||||
- Overlay: Fixed Windows overlay z-order so the visible subtitle overlay stops staying above unrelated apps after mpv loses focus.
|
||||
- Overlay: Fixed Windows overlay tracking to use native window polling and owner/z-order binding, which keeps the subtitle overlay aligned to the active mpv window more reliably.
|
||||
- Overlay: Fixed Windows overlay hide/restore behavior so minimizing mpv immediately hides the overlay and restoring mpv brings it back on top of the mpv window without requiring a click.
|
||||
- Overlay: Fixed stats overlay layering so the in-player stats page now stays above mpv and the subtitle overlay while it is open.
|
||||
- Overlay: Fixed Windows subtitle overlay stability so transient tracker misses and restore events keep the current subtitle visible instead of waiting for the next subtitle line.
|
||||
- Overlay: Fixed Windows focus handoff from the interactive subtitle overlay back to mpv so the overlay no longer drops behind mpv and briefly disappears.
|
||||
- Overlay: Fixed Windows visible-overlay startup so it no longer briefly opens as an interactive or opaque surface before the tracked transparent overlay state settles.
|
||||
- Overlay: Fixed spurious auto-pause after overlay visibility recovery and window resize so the overlay no longer pauses mpv until the pointer genuinely re-enters the subtitle area.
|
||||
- Overlay: Fixed Windows secondary subtitle hover mode so the expanded hover hit area no longer blocks the native minimize, maximize, and close buttons.
|
||||
- Overlay: Fixed Windows Yomitan popup focus loss after closing nested lookups so the original popup stays interactive instead of falling through to mpv.
|
||||
- Stats: Fixed immersion-tracker timestamp handling under Bun/libsql so library rows, session timelines, and lifetime summaries keep real wall-clock millisecond values instead of truncating to invalid negative timestamps.
|
||||
- Mpv Plugin: Fixed the mpv Lua plugin so hover and environment modules no longer use the `goto continue` pattern that can fail to parse on some user Lua runtimes.
|
||||
|
||||
**Internal**
|
||||
- Release: Added a dedicated beta/rc prerelease GitHub Actions workflow that publishes GitHub prereleases without consuming pending changelog fragments or updating AUR.
|
||||
- Release: Added prerelease note generation so beta and release-candidate tags can reuse the current pending `changes/*.md` fragments while leaving stable changelog publication for the final release cut.
|
||||
|
||||
## Previous Versions
|
||||
|
||||
<details>
|
||||
<summary>v0.11.x</summary>
|
||||
|
||||
<h2>v0.11.2 (2026-04-07)</h2>
|
||||
|
||||
**Changed**
|
||||
- Launcher: Replaced the launcher-only fullscreen toggle with `mpv.launchMode` so SubMiner-managed mpv playback can start in normal, maximized, or fullscreen mode.
|
||||
@@ -10,13 +53,13 @@
|
||||
- Launcher: Local playback now promotes a single unlabeled external subtitle sidecar to the primary slot instead of leaving mpv's embedded English auto-selection in place.
|
||||
- Release: Fixed Linux AppImage startup packaging so Chromium child relaunches can resolve the bundled `libffmpeg.so` instead of crash-looping on startup.
|
||||
|
||||
## v0.11.1 (2026-04-04)
|
||||
<h2>v0.11.1 (2026-04-04)</h2>
|
||||
|
||||
**Fixed**
|
||||
- Release: Linux packaged builds now expose the canonical `SubMiner` app identity to Electron's startup metadata so native Wayland compositors stop reporting the window class/app-id as lowercase `subminer`.
|
||||
- Linux: Linux now restores the runtime options, Jimaku, and Subsync shortcuts after the Electron 39 regression by routing those actions through the overlay's mpv/IPC shortcut path.
|
||||
|
||||
## v0.11.0 (2026-04-03)
|
||||
<h2>v0.11.0 (2026-04-03)</h2>
|
||||
|
||||
**Added**
|
||||
- Overlay: Added a playlist browser overlay modal for browsing sibling video files and the live mpv queue during playback.
|
||||
@@ -69,7 +112,7 @@
|
||||
- Release: Kept GitHub Releases green when AUR publish flakes and needs manual follow-up.
|
||||
- Release: Updated Electron to 39.8.6 and pinned patched transitive build dependencies to clear the reported high-severity audit findings.
|
||||
|
||||
## Previous Versions
|
||||
</details>
|
||||
|
||||
<details>
|
||||
<summary>v0.10.x</summary>
|
||||
|
||||
@@ -540,7 +540,7 @@ See `config.example.jsonc` for detailed configuration options.
|
||||
"openControllerSelect": "Alt+C",
|
||||
"openControllerDebug": "Alt+Shift+C",
|
||||
"openJimaku": "Ctrl+Shift+J",
|
||||
"toggleSubtitleSidebar": "\\",
|
||||
"toggleSubtitleSidebar": "Backslash",
|
||||
"multiCopyTimeoutMs": 3000
|
||||
}
|
||||
}
|
||||
@@ -564,7 +564,7 @@ See `config.example.jsonc` for detailed configuration options.
|
||||
| `openControllerSelect` | string \| `null` | Opens the controller config/remap modal (default: `"Alt+C"`) |
|
||||
| `openControllerDebug` | string \| `null` | Opens the controller debug modal (default: `"Alt+Shift+C"`) |
|
||||
| `openJimaku` | string \| `null` | Opens the Jimaku search modal (default: `"Ctrl+Shift+J"`) |
|
||||
| `toggleSubtitleSidebar` | string \| `null` | Dispatches the subtitle sidebar toggle action (default: `"\\"`). `subtitleSidebar.toggleKey` remains the primary bare-key setting. |
|
||||
| `toggleSubtitleSidebar` | string \| `null` | Dispatches the subtitle sidebar toggle action (default: `"Backslash"`). `subtitleSidebar.toggleKey` remains the primary bare-key setting. |
|
||||
|
||||
**See `config.example.jsonc`** for the complete list of shortcut configuration options.
|
||||
|
||||
|
||||
@@ -177,7 +177,7 @@
|
||||
"openSessionHelp": "CommandOrControl+Shift+H", // Open session help setting.
|
||||
"openControllerSelect": "Alt+C", // Open controller select setting.
|
||||
"openControllerDebug": "Alt+Shift+C", // Open controller debug setting.
|
||||
"toggleSubtitleSidebar": "\\" // Toggle subtitle sidebar setting.
|
||||
"toggleSubtitleSidebar": "Backslash" // Toggle subtitle sidebar setting.
|
||||
}, // Overlay keyboard shortcuts. Set a shortcut to null to disable.
|
||||
|
||||
// ==========================================
|
||||
|
||||
File diff suppressed because it is too large: docs/superpowers/plans/2026-04-09-stats-dashboard-feedback-pass.md (new file, 1609 lines).
@@ -0,0 +1,184 @@
|
||||
# Library Summary Replaces Per-Day Trends — Design
|
||||
|
||||
**Status:** Draft
|
||||
**Date:** 2026-04-09
|
||||
**Scope:** `stats/` frontend, `src/core/services/immersion-tracker/query-trends.ts` backend
|
||||
|
||||
## Problem
|
||||
|
||||
The "Library — Per Day" section on the stats Trends tab (`stats/src/components/trends/TrendsTab.tsx:224-254`) renders six stacked-area charts — Videos, Watch Time, Cards, Words, Lookups, and Lookups/100w, each broken down per title per day.
|
||||
|
||||
In practice these charts are not useful:
|
||||
|
||||
- Most titles only have activity on one or two days in a window, so they render as isolated bumps on a noisy baseline.
|
||||
- Stacking 7+ titles with mostly-zero days makes individual lines hard to follow.
|
||||
- The top "Activity" and "Period Trends" sections already answer "what am I doing per day" globally.
|
||||
- The "Library — Cumulative" section directly below already answers "which titles am I progressing through" with less noise.
|
||||
|
||||
The per-day section occupies significant vertical space without carrying its weight, and the user has confirmed it should be replaced.
|
||||
|
||||
## Goal
|
||||
|
||||
Replace the six per-day stacked charts with a single "Library — Summary" section that surfaces per-title aggregate statistics over the selected date range. The new view should make it trivially easy to answer: "For the selected window, which titles am I spending time on, how much mining output have they produced, and how efficient is my lookup rate on each?"
|
||||
|
||||
## Non-goals
|
||||
|
||||
- Changing the "Library — Cumulative" section (stays as-is).
|
||||
- Changing the "Activity", "Period Trends", or "Patterns" sections.
|
||||
- Adding a new API endpoint — the existing dashboard endpoint is extended in place.
|
||||
- Renaming internal `anime*` data-model identifiers (`animeId`, `imm_anime`, etc.). Those stay per the convention established in `c5e778d7`; only new fields/types/user-visible strings use generic "title"/"library" wording.
|
||||
- Supporting a true all-time library view on the Trends tab. If that's ever wanted, it belongs on a different tab.
|
||||
|
||||
## Solution Overview
|
||||
|
||||
Delete the "Library — Per Day" section. In its place, add "Library — Summary", composed of:
|
||||
|
||||
1. A horizontal-bar leaderboard chart of watch time per title (top 10, descending).
|
||||
2. A sortable table of every title with activity in the selected window, with columns: Title, Watch Time, Videos, Sessions, Cards, Words, Lookups, Lookups/100w, Date Range.
|
||||
|
||||
Both controls are scoped to the top-of-page date range selector. The existing shared Anime Visibility filter continues to work — it now gates Summary + Cumulative instead of Per-Day + Cumulative.
|
||||
|
||||
## Backend
|
||||
|
||||
### New type
|
||||
|
||||
Add to `stats/src/types/stats.ts` and the backend query module:
|
||||
|
||||
```ts
|
||||
type LibrarySummaryRow = {
|
||||
title: string; // display title — anime series, YouTube video title, etc.
|
||||
watchTimeMin: number; // sum(total_active_min) across the window
|
||||
videos: number; // distinct video_id count
|
||||
sessions: number; // session count from imm_sessions
|
||||
cards: number; // sum(total_cards)
|
||||
words: number; // sum(total_tokens_seen)
|
||||
lookups: number; // sum(lookup_count) from imm_sessions
|
||||
lookupsPerHundred: number | null; // lookups / words * 100, null when words == 0
|
||||
firstWatched: number; // min(rollup_day) as epoch day, within the window
|
||||
lastWatched: number; // max(rollup_day) as epoch day, within the window
|
||||
};
|
||||
```
|
||||
|
||||
### Query changes in `src/core/services/immersion-tracker/query-trends.ts`
|
||||
|
||||
- Add `librarySummary: LibrarySummaryRow[]` to `TrendsDashboardQueryResult`.
|
||||
- Populate it from a single aggregating query over `imm_daily_rollups` joined to `imm_videos` → `imm_anime`, filtered by `rollup_day` within the selected window. Session count and lookup count come from `imm_sessions` aggregated by `video_id` and then grouped by the parent library entry. Use a single query (or at most two joined/unioned) — no N+1.
|
||||
- `imm_anime` is the generic library-grouping table; anime series, YouTube videos, and yt-dlp imports all land there. The internal table name stays `imm_anime`; only the new field uses generic naming.
|
||||
- Return rows pre-sorted by `watchTimeMin` descending so the leaderboard is zero-cost and the table default sort matches.
|
||||
- Emit `lookupsPerHundred: null` when `words == 0`.
|
||||
|
||||
### Removed from API response
|
||||
|
||||
Drop the entire `animePerDay` field from `TrendsDashboardQueryResult` (both backend in `src/core/services/immersion-tracker/query-trends.ts` and frontend in `stats/src/types/stats.ts`).
|
||||
|
||||
Internally, the existing helpers (`buildPerAnimeFromDailyRollups`, `buildEpisodesPerAnimeFromDailyRollups`) are still used as intermediates to build `animeCumulative.*` via `buildCumulativePerAnime`. Keep those helpers — just scope their output to local variables inside `getTrendsDashboard` instead of exposing them on the response. The `buildPerAnimeFromSessions` call for lookups and the `buildLookupsPerHundredPerAnime` helper become unused and can be deleted.
|
||||
|
||||
Before removing `animePerDay` from the frontend type, verify no other file under `stats/src/` references it. Based on current inspection, only `TrendsTab.tsx` and `stats/src/types/stats.ts` touch it.
|
||||
|
||||
## Frontend
|
||||
|
||||
### New component: `stats/src/components/trends/LibrarySummarySection.tsx`
|
||||
|
||||
Owns the header, leaderboard chart, visibility-filtered data, and the table. Keeps `TrendsTab.tsx` from growing. Component props: `{ rows: LibrarySummaryRow[]; hiddenTitles: ReadonlySet<string>; windowStart: Date; windowEnd: Date }`.
|
||||
|
||||
Internal state: `useState<{ column: ColumnId; direction: 'asc' | 'desc' }>` for sort, defaulting to `{ column: 'watchTimeMin', direction: 'desc' }`.
|
||||
|
||||
### Layout
|
||||
|
||||
Replaces `TrendsTab.tsx:224-254`:
|
||||
|
||||
```
|
||||
[SectionHeader: "Library — Summary"]
|
||||
[AnimeVisibilityFilter — unchanged, shared with Cumulative below]
|
||||
[Card, col-span-full: Leaderboard — horizontal bar chart, ~260px tall]
|
||||
[Card, col-span-full: Sortable table, auto height up to ~480px with internal scroll]
|
||||
```
|
||||
|
||||
Both cards use the existing chart/card wrapper styling.
|
||||
|
||||
### Leaderboard chart
|
||||
|
||||
- Recharts horizontal bar chart (matches the rest of the page — existing charts use `recharts`, not ECharts).
|
||||
- Top 10 titles by watch time. If fewer titles have activity, render what's there.
|
||||
- Y-axis: title (category), truncated with ellipsis at container width; full title visible in the Recharts tooltip.
|
||||
- X-axis: minutes (number).
|
||||
- Use `layout="vertical"` with `YAxis dataKey="title" type="category"` and `XAxis type="number"`.
|
||||
- Single series color: `#8aadf4` (matching the existing Watch Time color).
|
||||
- Reuse `CHART_DEFAULTS`, `CHART_THEME`, `TOOLTIP_CONTENT_STYLE` from `stats/src/lib/chart-theme.ts` so theming matches the rest of the dashboard.
|
||||
- Chart order is fixed at watch-time desc regardless of table sort — the leaderboard's meaning is fixed.
|
||||
|
||||
### Table
|
||||
|
||||
- Plain HTML `<table>` with Tailwind classes. No new deps.
|
||||
- Columns, in order:
|
||||
1. **Title** — left-aligned, sticky, truncated with ellipsis, full title on hover.
|
||||
2. **Watch Time** — formatted `Xh Ym` when ≥60 min, else `Xm`.
|
||||
3. **Videos** — integer.
|
||||
4. **Sessions** — integer.
|
||||
5. **Cards** — integer.
|
||||
6. **Words** — integer.
|
||||
7. **Lookups** — integer.
|
||||
8. **Lookups/100w** — one decimal place, `—` when null.
|
||||
9. **Date Range** — `Mon D → Mon D` using the title's `firstWatched` / `lastWatched` within the window.
|
||||
- Click a column header to sort; click again to reverse. Visual arrow on the active column.
|
||||
- Numeric columns right-aligned.
|
||||
- Null `lookupsPerHundred` sorts as the lowest value in both directions (consistent with "no data").
|
||||
- Row hover highlight; no row click action (read-only view).
|
||||
- Empty state: "No library activity in the selected window."
|
||||
|
||||
### Visibility filter integration
|
||||
|
||||
Hiding a title via `AnimeVisibilityFilter` removes it from both the leaderboard and the table. The filter's set of available titles is built from the union of titles that appear in `librarySummary` and the existing `animeCumulative.*` arrays (matches current behavior in `buildAnimeVisibilityOptions`).
|
||||
|
||||
### `TrendsTab.tsx` changes
|
||||
|
||||
- Remove the `filteredEpisodesPerAnime`, `filteredWatchTimePerAnime`, `filteredCardsPerAnime`, `filteredWordsPerAnime`, `filteredLookupsPerAnime`, `filteredLookupsPerHundredPerAnime` locals.
|
||||
- Remove the six `<StackedTrendChart>` calls in the "Library — Per Day" section.
|
||||
- Remove the `<SectionHeader>Library — Per Day</SectionHeader>` and the `<AnimeVisibilityFilter>` from that position.
|
||||
- Insert `<SectionHeader>Library — Summary</SectionHeader>` + `<AnimeVisibilityFilter>` + `<LibrarySummarySection>` in the same place.
|
||||
- Update `buildAnimeVisibilityOptions` input to use `librarySummary` titles instead of the six dropped `animePerDay.*` arrays.
|
||||
|
||||
## Data flow
|
||||
|
||||
1. `useTrends(range, groupBy)` calls `/api/stats/trends/dashboard`.
|
||||
2. Response now includes `librarySummary` (sorted by watch time desc).
|
||||
3. `TrendsTab` holds the shared `hiddenAnime` set (unchanged).
|
||||
4. `LibrarySummarySection` receives `librarySummary` + `hiddenAnime`, filters out hidden rows, renders the leaderboard from the top-10 slice of the filtered list, renders the table from the filtered list with local sort state applied.
|
||||
5. Date-range selector changes trigger a new fetch; `groupBy` toggle does not affect the summary section (it's always window-total).
|
||||
|
||||
## Edge cases
|
||||
|
||||
- **No activity in window:** Section renders header + empty-state card. Leaderboard card hidden. Visibility filter hidden.
|
||||
- **One title only:** Leaderboard renders a single bar; table renders one row. No special-casing.
|
||||
- **Title with zero words but non-zero lookups:** `lookupsPerHundred` is `null`, rendered as `—`. Sort treats null as lowest.
|
||||
- **Title with zero cards/lookups/words but non-zero watch time:** Normal zero rendering, still shown.
|
||||
- **Very long titles:** Ellipsis in chart y-axis labels and table title column; full title in `title` attribute / Recharts tooltip.
|
||||
- **Mixed sources (anime + YouTube):** No special case — both land in `imm_anime` and are grouped uniformly.
|
||||
|
||||
## Testing
|
||||
|
||||
### Backend (`query-trends.ts`)
|
||||
|
||||
New unit tests, following the existing pattern:
|
||||
|
||||
1. Empty window returns `librarySummary: []`.
|
||||
2. Single title with a few rollups: all aggregates are correct; `firstWatched`/`lastWatched` match the bounding days within the window.
|
||||
3. Multiple titles: rows returned sorted by watch time desc.
|
||||
4. Mixed sources (anime-style + YouTube-style entries in `imm_anime`): both appear in the summary with their own aggregates.
|
||||
5. Title with `words == 0`: `lookupsPerHundred` is `null`.
|
||||
6. Date range excludes some rollups: excluded rollups are not counted; `firstWatched`/`lastWatched` reflect only within-window activity.
|
||||
7. `sessions` and `lookups` come from `imm_sessions`, not `imm_daily_rollups`, and are correctly attributed to the parent library entry.
|
||||
|
||||
### Frontend
|
||||
|
||||
- Existing Trends tab smoke test should continue to pass after wiring.
|
||||
- Optional: a targeted render test for `LibrarySummarySection` (empty state, single title, sort toggle, visibility filter interaction). Not required for merge if the smoke test exercises the happy path.
|
||||
|
||||
## Release / docs
|
||||
|
||||
- One fragment in `changes/*.md` summarizing the replacement.
|
||||
- No user-facing docs (`docs-site/`) changes unless the per-day section was documented there — verify during implementation.
|
||||
|
||||
## Open items
|
||||
|
||||
None.
|
||||
@@ -0,0 +1,347 @@
|
||||
# Stats Dashboard Feedback Pass — Design
|
||||
|
||||
Date: 2026-04-09
|
||||
Scope: Stats dashboard UX follow-ups from user feedback (items 1–7).
|
||||
Delivery: **Single PR**, broken into logically scoped commits.
|
||||
|
||||
## Goals
|
||||
|
||||
Address seven concrete pieces of feedback against the Statistics menu:
|
||||
|
||||
1. Library — collapse episodes behind a per-series dropdown.
|
||||
2. Sessions — roll up multiple sessions of the same episode within a day.
|
||||
3. Trends — add a 365d range option.
|
||||
4. Library — delete an episode (video) from its detail view.
|
||||
5. Vocabulary — tighten spacing between word and reading in the Top 50 table.
|
||||
6. Episode detail — hide cards whose Anki notes have been deleted.
|
||||
7. Trend/watch charts — add gridlines, fix tick legibility, unify theming.
|
||||
|
||||
Out of scope for this pass: English-token ingestion cleanup and Overview stat-card drill-downs (feedback items 8 and 9). Those require a larger design decision and a migration respectively.
|
||||
|
||||
## Files touched (inventory)
|
||||
|
||||
Dashboard (`stats/src/`):
|
||||
- `components/library/LibraryTab.tsx` — collapsible groups (item 1).
|
||||
- `components/library/MediaDetailView.tsx`, `components/library/MediaHeader.tsx` — delete-episode action (item 4).
|
||||
- `components/sessions/SessionsTab.tsx`, `components/library/MediaSessionList.tsx` — episode rollup (item 2).
|
||||
- `components/trends/DateRangeSelector.tsx`, `hooks/useTrends.ts`, `lib/api-client.ts`, `lib/api-client.test.ts` — 365d (item 3).
|
||||
- `components/vocabulary/FrequencyRankTable.tsx` — word/reading column collapse (item 5).
|
||||
- `components/anime/EpisodeDetail.tsx` — filter deleted Anki cards (item 6).
|
||||
- `components/trends/TrendChart.tsx`, `components/trends/StackedTrendChart.tsx`, `components/overview/WatchTimeChart.tsx`, `lib/chart-theme.ts` — chart clarity (item 7).
|
||||
- New file: `stats/src/lib/session-grouping.ts` + `session-grouping.test.ts`.
|
||||
|
||||
Backend (`src/core/services/`):
|
||||
- `immersion-tracker/query-trends.ts` — extend `TrendRange` and `TREND_DAY_LIMITS` (item 3).
|
||||
- `immersion-tracker/__tests__/query.test.ts` — 365d coverage (item 3).
|
||||
- `stats-server.ts` — passthrough if range validation lives here (check before editing).
|
||||
- `__tests__/stats-server.test.ts` — 365d coverage (item 3).
|
||||
|
||||
## Commit plan
|
||||
|
||||
One PR, one feature per commit. Order picks low-risk mechanical changes first so failures in later commits don't block merging of earlier ones.
|
||||
|
||||
1. `feat(stats): add 365d range to trends dashboard` (item 3)
|
||||
2. `fix(stats): tighten word/reading column in Top 50 table` (item 5)
|
||||
3. `fix(stats): hide cards deleted from Anki in episode detail` (item 6)
|
||||
4. `feat(stats): delete episode from library detail view` (item 4)
|
||||
5. `feat(stats): collapsible series groups in library` (item 1)
|
||||
6. `feat(stats): roll up same-episode sessions within a day` (item 2)
|
||||
7. `feat(stats): gridlines and unified theme for trend charts` (item 7)
|
||||
|
||||
Each commit must pass `bun run typecheck`, `bun run test:fast`, and any change-specific checks listed below.
|
||||
|
||||
---
|
||||
|
||||
## Item 1 — Library collapsible series groups
|
||||
|
||||
### Current behavior
|
||||
|
||||
`LibraryTab.tsx` groups media via `groupMediaLibraryItems` and always renders the full grid of `MediaCard`s beneath each group header.
|
||||
|
||||
### Target behavior
|
||||
|
||||
Each group header becomes clickable. Groups with `items.length > 1` default to **collapsed**; single-video groups stay expanded (collapsing them would be visual noise).
|
||||
|
||||
### Implementation
|
||||
|
||||
- State: `const [collapsedGroups, setCollapsedGroups] = useState<Set<string>>(...)`. Initialize from `grouped` where `items.length > 1`.
|
||||
- Toggle helper: `toggleGroup(key: string)` adds/removes from the set.
|
||||
- Group header: wrap in a `<button>` with `aria-expanded` and a chevron icon (`▶`/`▼`). Keep the existing cover + title + subtitle layout inside the button.
|
||||
- Children grid is conditionally rendered on `!collapsedGroups.has(group.key)`.
|
||||
- Header summary (`N videos · duration · cards`) stays visible in both states so collapsed groups remain informative.
|
||||
|
||||
### Tests
|
||||
|
||||
- New `LibraryTab.test.tsx` (if not already present — check first) covering:
|
||||
- Multi-video group renders collapsed on first mount.
|
||||
- Single-video group renders expanded on first mount.
|
||||
- Clicking the header toggles visibility.
|
||||
- Header summary is visible in both states.
|
||||
|
||||
---
|
||||
|
||||
## Item 2 — Sessions episode rollup within a day
|
||||
|
||||
### Current behavior
|
||||
|
||||
`SessionsTab.tsx:10-24` groups sessions by day label only (`formatSessionDayLabel(startedAtMs)`). Multiple sessions of the same episode on the same day show as independent rows. `MediaSessionList.tsx` has the same problem inside the library detail view.
|
||||
|
||||
### Target behavior
|
||||
|
||||
Within each day, sessions with the same `videoId` collapse into one parent row showing combined totals. A chevron reveals the individual sessions. Single-session buckets render flat (no pointless nesting).
|
||||
|
||||
### Implementation
|
||||
|
||||
- New helper in `stats/src/lib/session-grouping.ts`:
|
||||
```ts
|
||||
export interface SessionBucket {
|
||||
key: string; // videoId as string, or `s-${sessionId}` for singletons
|
||||
videoId: number | null;
|
||||
sessions: SessionSummary[];
|
||||
totalActiveMs: number;
|
||||
totalCardsMined: number;
|
||||
representativeSession: SessionSummary; // most recent, for header display
|
||||
}
|
||||
export function groupSessionsByVideo(sessions: SessionSummary[]): SessionBucket[];
|
||||
```
|
||||
Sessions missing a `videoId` become singleton buckets.
|
||||
|
||||
- `SessionsTab.tsx`: after day grouping, pipe each `daySessions` through `groupSessionsByVideo`. Render each bucket:
|
||||
- `sessions.length === 1`: existing `SessionRow` behavior, unchanged.
|
||||
- `sessions.length >= 2`: render a **bucket row** that looks like `SessionRow` but shows combined totals and session count (e.g. `3 sessions · 1h 24m · 12 cards`). Chevron state stored in a second `Set<string>` on bucket key. Expanded buckets render the child `SessionRow`s indented (`pl-8`) beneath the header.
|
||||
- `MediaSessionList.tsx`: within the media detail view, a single video's sessions are all the same `videoId` by definition — grouping here is by day only, and within a day multiple sessions render nested under a day header. Re-use the same visual pattern; factor the bucket row into a shared `SessionBucketRow` component.
|
||||
|
||||
### Delete semantics
|
||||
|
||||
- Deleting a bucket header offers "Delete all N sessions in this group" (reuse `confirmDayGroupDelete` pattern with a bucket-specific message, or add `confirmBucketDelete`).
|
||||
- Deleting an individual session from inside an expanded bucket keeps the existing single-delete flow.
|
||||
|
||||
### Tests
|
||||
|
||||
- `session-grouping.test.ts`:
|
||||
- Empty input → empty output.
|
||||
- All unique videos → N singleton buckets.
|
||||
- Two sessions same videoId → one bucket with correct totals and representative (most recent start time).
|
||||
- Missing videoId → singleton bucket keyed by sessionId.
|
||||
- `SessionsTab.test.tsx` (extend or add) verifying the rendered bucket rows expand/collapse and delete hooks fire with the right ID set.
|
||||
|
||||
---
|
||||
|
||||
## Item 3 — 365d trends range
|
||||
|
||||
### Backend
|
||||
|
||||
`src/core/services/immersion-tracker/query-trends.ts`:
|
||||
- `type TrendRange = '7d' | '30d' | '90d' | '365d' | 'all';`
|
||||
- Add `'365d': 365` to `TREND_DAY_LIMITS`.
|
||||
- `getTrendDayLimit` picks up the new key automatically because of the `Exclude<TrendRange, 'all'>` generic.
|
||||
|
||||
`src/core/services/stats-server.ts`:
|
||||
- Search for any hardcoded range validation (e.g. allow-list in the trends route handler) and extend it.
|
||||
|
||||
### Frontend
|
||||
|
||||
- `hooks/useTrends.ts`: widen the `TimeRange` union.
|
||||
- `components/trends/DateRangeSelector.tsx`: add `'365d'` to the options list. Display label stays as `365d`.
|
||||
- `lib/api-client.ts` / `api-client.test.ts`: if the client validates ranges, add `365d`.
|
||||
|
||||
### Tests
|
||||
|
||||
- `query.test.ts`: extend the existing range table to cover `365d` returning 365 days of data.
|
||||
- `stats-server.test.ts`: ensure the route accepts `range=365d`.
|
||||
- `api-client.test.ts`: ensure the client emits the new range.
|
||||
|
||||
### Change-specific checks
|
||||
|
||||
- `bun run test:config` is not required here (no schema/defaults change).
|
||||
- Run `bun run typecheck` + `bun run test:fast`.
|
||||
|
||||
---
|
||||
|
||||
## Item 4 — Delete episode from library detail
|
||||
|
||||
### Current behavior
|
||||
|
||||
`MediaDetailView.tsx` provides session-level delete only. The backend `deleteVideo` exists (`query-maintenance.ts:509`), the API is exposed at `stats-server.ts:559`, and `api-client.deleteVideo` is already wired (`stats/src/lib/api-client.ts:146`). `EpisodeList.tsx:46` already uses it from the anime tab.
|
||||
|
||||
### Target behavior
|
||||
|
||||
A "Delete Episode" action in `MediaHeader` (top-right, small, `text-ctp-red`), gated by `confirmEpisodeDelete(title)`. On success, call `onBack()` and make sure the parent `LibraryTab` refetches.
|
||||
|
||||
### Implementation
|
||||
|
||||
- Add an `onDeleteEpisode?: () => void` prop to `MediaHeader` and render the button only if provided.
|
||||
- In `MediaDetailView`:
|
||||
- New handler `handleDeleteEpisode` that calls `apiClient.deleteVideo(videoId)`, then `onBack()`.
|
||||
- Reuse `confirmEpisodeDelete` from `stats/src/lib/delete-confirm.ts`.
|
||||
- In `LibraryTab`:
|
||||
- `useMediaLibrary` returns fresh data on mount. The simplest fix: pass a `refresh` function from the hook (extend the hook if it doesn't already expose one) and call it when the detail view signals back.
|
||||
- Alternative: force a remount by incrementing a `libraryVersion` key on the library list. Prefer `refresh` for clarity.
|
||||
|
||||
### Tests
|
||||
|
||||
- Extend the existing `MediaDetailView.test.tsx`: mock `apiClient.deleteVideo`, click the new button, confirm `onBack` fires after success.
|
||||
- `useMediaLibrary.test.ts`: if we add a `refresh` method, cover it.
|
||||
|
||||
---
|
||||
|
||||
## Item 5 — Vocabulary word/reading column collapse
|
||||
|
||||
### Current behavior
|
||||
|
||||
`FrequencyRankTable.tsx:110-144` uses a 5-column table: `Rank | Word | Reading | POS | Seen`. Word and Reading are auto-sized, producing a large gap.
|
||||
|
||||
### Target behavior
|
||||
|
||||
Merge Word + Reading into a single column titled "Word". Reading sits immediately after the headword in a muted, smaller style.
|
||||
|
||||
### Implementation
|
||||
|
||||
- Drop the `<th>Reading</th>` header and cell.
|
||||
- Word cell becomes:
|
||||
```tsx
|
||||
<td className="py-1.5 pr-3">
|
||||
<span className="text-ctp-text font-medium">{w.headword}</span>
|
||||
{reading && (
|
||||
<span className="text-ctp-subtext0 text-xs ml-1.5">
|
||||
【{reading}】
|
||||
</span>
|
||||
)}
|
||||
</td>
|
||||
```
|
||||
where `reading = fullReading(w.headword, w.reading)` and differs from `headword`.
|
||||
- Keep `fullReading` import from `reading-utils`.
|
||||
|
||||
### Tests
|
||||
|
||||
- Extend `FrequencyRankTable.test.tsx` (if present — otherwise add a focused test) to assert:
|
||||
- Headword renders.
|
||||
- Reading renders when different from headword.
|
||||
- Reading does not render when equal to headword.
|
||||
|
||||
---
|
||||
|
||||
## Item 6 — Hide Anki-deleted cards in Cards Mined
|
||||
|
||||
### Current behavior
|
||||
|
||||
`EpisodeDetail.tsx:109-147` iterates `cardEvents`, fetches note info via `ankiNotesInfo(allNoteIds)`, and for each `noteId` renders a row even if no matching `info` came back — the user sees an empty word with an "Open in Anki" button that leads nowhere.
|
||||
|
||||
### Target behavior
|
||||
|
||||
After `ankiNotesInfo` resolves:
|
||||
- Drop `noteId`s that are not in the resolved map.
|
||||
- Drop `cardEvents` whose `noteIds` list was non-empty but is now empty after filtering.
|
||||
- Card events with a positive `cardsDelta` but no `noteIds` (legacy rollup path) still render as `+N cards` — we have no way to cross-reference them, so leave them alone.
|
||||
|
||||
### Implementation
|
||||
|
||||
- Compute `filteredCardEvents` as a `useMemo` depending on `data.cardEvents` and `noteInfos`.
|
||||
- Iterate `filteredCardEvents` instead of `cardEvents` in the render.
|
||||
- Surface a subtle note (optional, muted) "N cards hidden (deleted from Anki)" at the end of the list if any were filtered — helps the user understand why counts here diverge from session totals. Final decision on the note can be made at PR review; default: **show it**.
|
||||
|
||||
### Tests
|
||||
|
||||
- Add a test in `EpisodeDetail.test.tsx` (add the file if not present) that stubs `ankiNotesInfo` to return only a subset of notes and verifies the missing ones are not rendered.
|
||||
|
||||
### Other call sites
|
||||
|
||||
- Grep so far shows `ankiNotesInfo` is only used in `EpisodeDetail.tsx`. Re-verify before landing the commit; if another call site appears, apply the same filter.
|
||||
|
||||
---
|
||||
|
||||
## Item 7 — Trend/watch chart clarity pass
|
||||
|
||||
### Current behavior
|
||||
|
||||
`TrendChart.tsx`, `StackedTrendChart.tsx`, and `WatchTimeChart.tsx` render Recharts components with:
|
||||
- No `CartesianGrid` → no horizontal reference lines.
|
||||
- 9px axis ticks → borderline unreadable.
|
||||
- Height 120 → cramped.
|
||||
- Tooltip uses raw labels (`04/04` etc.).
|
||||
- No shared theme object; each chart redefines colors and tooltip styles inline.
|
||||
|
||||
`stats/src/lib/chart-theme.ts` already exists and currently exports a single `CHART_THEME` constant with tick/tooltip colors and `barFill`. It will be extended, not replaced, to preserve existing consumers.
|
||||
|
||||
### Target behavior
|
||||
|
||||
All three charts share a theme, have horizontal gridlines, readable ticks, and sensible tooltips.
|
||||
|
||||
### Implementation
|
||||
|
||||
Extend `stats/src/lib/chart-theme.ts` with the additional shared defaults (keeping the existing `CHART_THEME` export intact so current consumers don't break):
|
||||
```ts
|
||||
export const CHART_THEME = {
|
||||
tick: '#a5adcb',
|
||||
tooltipBg: '#363a4f',
|
||||
tooltipBorder: '#494d64',
|
||||
tooltipText: '#cad3f5',
|
||||
tooltipLabel: '#b8c0e0',
|
||||
barFill: '#8aadf4',
|
||||
grid: '#494d64',
|
||||
axisLine: '#494d64',
|
||||
} as const;
|
||||
|
||||
export const CHART_DEFAULTS = {
|
||||
height: 160,
|
||||
tickFontSize: 11,
|
||||
margin: { top: 8, right: 8, bottom: 0, left: 0 },
|
||||
grid: { strokeDasharray: '3 3', vertical: false },
|
||||
} as const;
|
||||
|
||||
export const TOOLTIP_CONTENT_STYLE = {
|
||||
background: CHART_THEME.tooltipBg,
|
||||
border: `1px solid ${CHART_THEME.tooltipBorder}`,
|
||||
borderRadius: 6,
|
||||
color: CHART_THEME.tooltipText,
|
||||
fontSize: 12,
|
||||
};
|
||||
```
|
||||
|
||||
Apply to each chart:
|
||||
- Import `CartesianGrid` from recharts.
|
||||
- Insert `<CartesianGrid stroke={CHART_THEME.grid} {...CHART_DEFAULTS.grid} />` inside each chart container.
|
||||
- `<XAxis tick={{ fontSize: CHART_DEFAULTS.tickFontSize, fill: CHART_THEME.tick }} />` and equivalent `YAxis`.
|
||||
- `YAxis` gains `axisLine={{ stroke: CHART_THEME.axisLine }}`.
|
||||
- `ResponsiveContainer` height changes from 120 → `CHART_DEFAULTS.height`.
|
||||
- `Tooltip` `contentStyle` uses `TOOLTIP_CONTENT_STYLE`, and charts pass a `labelFormatter` when the label is a date key (e.g. show `Fri Apr 4`).
|
||||
|
||||
### Unit formatters
|
||||
|
||||
- `TrendChart` already accepts a `formatter` prop — extend usage sites to pass unit-aware formatters where they aren't already (`formatDuration`, `formatNumber`, etc.).
|
||||
|
||||
### Tests
|
||||
|
||||
- `chart-theme.test.ts` (if present — otherwise add a trivial snapshot to keep the shape stable).
|
||||
- `TrendChart` snapshot/render tests: no regression, gridline element present.
|
||||
|
||||
---
|
||||
|
||||
## Verification gate
|
||||
|
||||
Before requesting code review, run:
|
||||
|
||||
```
|
||||
bun run typecheck
|
||||
bun run test:fast
|
||||
bun run test:env
|
||||
bun run test:runtime:compat # dist-sensitive check for the charts
|
||||
bun run build
|
||||
bun run test:smoke:dist
|
||||
```
|
||||
|
||||
No docs-site changes are planned in this spec; if `docs-site/` ends up touched (e.g. screenshots), also run `bun run docs:test` and `bun run docs:build`.
|
||||
|
||||
No config schema changes → `bun run test:config` and `bun run generate:config-example` are not required.
|
||||
|
||||
## Risks and open questions
|
||||
|
||||
- **MediaDetailView refresh**: `useMediaLibrary` may not expose a `refresh` function. If it doesn't, the simplest path is adding one; the alternative (keying a remount) works but is harder to test. Decide during implementation.
|
||||
- **Session bucket delete UX**: "Delete all N sessions in this group" is powerful. The copy must make it clear the underlying sessions are being removed, not just the grouping. Reuse `confirmBucketDelete` wording from existing confirm helpers if possible.
|
||||
- **Anki-deleted-cards hidden notice**: Showing a subtle "N cards hidden" footer is a call that can be made at PR review.
|
||||
- **Bucket delete helper**: `confirmBucketDelete` does not currently exist in `delete-confirm.ts`. Implementation either adds it or reuses `confirmDayGroupDelete` with bucket-specific wording — decide during the session-rollup commit.
|
||||
|
||||
## Changelog entry
|
||||
|
||||
User-visible PR → needs a fragment under `changes/*.md`. Suggested title:
|
||||
`Stats dashboard: collapsible series, session rollups, 365d trends, chart polish, episode delete.`
|
||||
@@ -2,7 +2,7 @@
|
||||
"name": "subminer",
|
||||
"productName": "SubMiner",
|
||||
"desktopName": "SubMiner.desktop",
|
||||
"version": "0.12.0-beta.3",
|
||||
"version": "0.12.0",
|
||||
"description": "All-in-one sentence mining overlay with AnkiConnect and dictionary integration",
|
||||
"packageManager": "bun@1.3.5",
|
||||
"main": "dist/main-entry.js",
|
||||
@@ -20,7 +20,7 @@
|
||||
"dev:stats": "cd stats && bun run dev",
|
||||
"build": "bun run build:yomitan && bun run build:stats && tsc -p tsconfig.json && bun run build:renderer && bun run build:launcher && bun run build:assets",
|
||||
"build:renderer": "esbuild src/renderer/renderer.ts --bundle --platform=browser --format=esm --target=es2022 --outfile=dist/renderer/renderer.js --sourcemap",
|
||||
"changelog:build": "bun run scripts/build-changelog.ts build && bun run changelog:docs",
|
||||
"changelog:build": "bun run scripts/build-changelog.ts build-release",
|
||||
"changelog:check": "bun run scripts/build-changelog.ts check",
|
||||
"changelog:docs": "bun run scripts/build-changelog.ts docs",
|
||||
"changelog:lint": "bun run scripts/build-changelog.ts lint",
|
||||
@@ -45,7 +45,7 @@
|
||||
"test:config:src": "bun test src/config/config.test.ts src/config/path-resolution.test.ts src/config/resolve/anki-connect.test.ts src/config/resolve/integrations.test.ts src/config/resolve/subtitle-style.test.ts src/config/resolve/jellyfin.test.ts src/config/definitions/domain-registry.test.ts src/generate-config-example.test.ts src/verify-config-example.test.ts",
|
||||
"test:config:dist": "bun test dist/config/config.test.js dist/config/path-resolution.test.js dist/config/resolve/anki-connect.test.js dist/config/resolve/integrations.test.js dist/config/resolve/subtitle-style.test.js dist/config/resolve/jellyfin.test.js dist/config/definitions/domain-registry.test.js dist/generate-config-example.test.js dist/verify-config-example.test.js",
|
||||
"test:config:smoke:dist": "bun test dist/config/path-resolution.test.js",
|
||||
"test:plugin:src": "lua scripts/test-plugin-start-gate.lua && lua scripts/test-plugin-binary-windows.lua",
|
||||
"test:plugin:src": "lua scripts/test-plugin-lua-compat.lua && lua scripts/test-plugin-start-gate.lua && lua scripts/test-plugin-binary-windows.lua",
|
||||
"test:launcher:smoke:src": "bun test launcher/smoke.e2e.test.ts",
|
||||
"test:launcher:src": "bun test launcher/config.test.ts launcher/config-domain-parsers.test.ts launcher/config/cli-parser-builder.test.ts launcher/config/args-normalizer.test.ts launcher/mpv.test.ts launcher/picker.test.ts launcher/parse-args.test.ts launcher/main.test.ts launcher/commands/command-modules.test.ts launcher/smoke.e2e.test.ts && bun run test:plugin:src",
|
||||
"test:core:src": "bun test src/cli/args.test.ts src/cli/help.test.ts src/shared/setup-state.test.ts src/core/services/cli-command.test.ts src/core/services/field-grouping-overlay.test.ts src/core/services/numeric-shortcut-session.test.ts src/core/services/secondary-subtitle.test.ts src/core/services/mpv-render-metrics.test.ts src/core/services/overlay-content-measurement.test.ts src/core/services/mpv-control.test.ts src/core/services/mpv.test.ts src/core/services/runtime-options-ipc.test.ts src/core/services/runtime-config.test.ts src/core/services/yomitan-extension-paths.test.ts src/core/services/config-hot-reload.test.ts src/core/services/discord-presence.test.ts src/core/services/tokenizer.test.ts src/core/services/tokenizer/annotation-stage.test.ts src/core/services/tokenizer/parser-selection-stage.test.ts src/core/services/tokenizer/parser-enrichment-stage.test.ts src/core/services/subsync.test.ts src/core/services/overlay-bridge.test.ts src/core/services/overlay-shortcut-handler.test.ts src/core/services/stats-window.test.ts src/core/services/mining.test.ts src/core/services/anki-jimaku.test.ts src/core/services/jimaku-download-path.test.ts src/core/services/jellyfin.test.ts src/core/services/jellyfin-remote.test.ts src/core/services/immersion-tracker-service.test.ts src/core/services/overlay-runtime-init.test.ts src/core/services/app-ready.test.ts src/core/services/startup-bootstrap.test.ts src/core/services/subtitle-processing-controller.test.ts src/core/services/anilist/anilist-update-queue.test.ts src/core/services/anilist/rate-limiter.test.ts src/core/services/jlpt-token-filter.test.ts src/core/services/subtitle-position.test.ts src/core/utils/shortcut-config.test.ts src/main/runtime/first-run-setup-plugin.test.ts src/main/runtime/first-run-setup-service.test.ts src/main/runtime/first-run-setup-window.test.ts src/main/runtime/tray-runtime.test.ts src/main/runtime/tray-main-actions.test.ts src/main/runtime/tray-main-deps.test.ts 
src/main/runtime/tray-runtime-handlers.test.ts src/main/runtime/cli-command-context-main-deps.test.ts src/main/runtime/app-ready-main-deps.test.ts src/renderer/error-recovery.test.ts src/renderer/subtitle-render.test.ts src/renderer/handlers/mouse.test.ts src/renderer/handlers/keyboard.test.ts src/renderer/modals/jimaku.test.ts src/subsync/utils.test.ts src/main/anilist-url-guard.test.ts src/window-trackers/hyprland-tracker.test.ts src/window-trackers/x11-tracker.test.ts src/window-trackers/windows-helper.test.ts src/window-trackers/windows-tracker.test.ts launcher/config.test.ts launcher/config-domain-parsers.test.ts launcher/config/cli-parser-builder.test.ts launcher/config/args-normalizer.test.ts launcher/parse-args.test.ts launcher/main.test.ts launcher/commands/command-modules.test.ts launcher/setup-gate.test.ts stats/src/lib/api-client.test.ts",
|
||||
|
||||
@@ -71,7 +71,13 @@ function M.create(ctx)
|
||||
end
|
||||
|
||||
for _, dir in ipairs(candidates) do
|
||||
if file_exists(join_path(dir, "config.jsonc")) or file_exists(join_path(dir, "config.json")) or file_exists(dir) then
|
||||
if file_exists(join_path(dir, "config.jsonc")) or file_exists(join_path(dir, "config.json")) then
|
||||
return dir
|
||||
end
|
||||
end
|
||||
|
||||
for _, dir in ipairs(candidates) do
|
||||
if file_exists(dir) then
|
||||
return dir
|
||||
end
|
||||
end
|
||||
@@ -108,33 +114,26 @@ function M.create(ctx)
|
||||
if not image then
|
||||
image = line:match('^"([^"]+)"')
|
||||
end
|
||||
if not image then
|
||||
goto continue
|
||||
end
|
||||
if image == "subminer" or image == "subminer.exe" or image == "subminer.appimage" or image == "subminer.app" then
|
||||
return true
|
||||
end
|
||||
if image:find("subminer", 1, true) and not image:find(".lua", 1, true) then
|
||||
return true
|
||||
if image then
|
||||
if image == "subminer" or image == "subminer.exe" or image == "subminer.appimage" or image == "subminer.app" then
|
||||
return true
|
||||
end
|
||||
if image:find("subminer", 1, true) and not image:find(".lua", 1, true) then
|
||||
return true
|
||||
end
|
||||
end
|
||||
else
|
||||
local argv0 = line:match('^"([^"]+)"') or line:match("^%s*([^%s]+)")
|
||||
if not argv0 then
|
||||
goto continue
|
||||
end
|
||||
if argv0:find("subminer.lua", 1, true) or argv0:find("subminer.conf", 1, true) then
|
||||
goto continue
|
||||
end
|
||||
local exe = argv0:match("([^/\\]+)$") or argv0
|
||||
if exe == "SubMiner" or exe == "SubMiner.AppImage" or exe == "SubMiner.exe" or exe == "subminer" or exe == "subminer.appimage" or exe == "subminer.exe" then
|
||||
return true
|
||||
end
|
||||
if exe:find("subminer", 1, true) and exe:find("%.lua", 1, true) == nil and exe:find("%.app", 1, true) == nil then
|
||||
return true
|
||||
if argv0 and not argv0:find("subminer.lua", 1, true) and not argv0:find("subminer.conf", 1, true) then
|
||||
local exe = argv0:match("([^/\\]+)$") or argv0
|
||||
if exe == "SubMiner" or exe == "SubMiner.AppImage" or exe == "SubMiner.exe" or exe == "subminer" or exe == "subminer.appimage" or exe == "subminer.exe" then
|
||||
return true
|
||||
end
|
||||
if exe:find("subminer", 1, true) and exe:find("%.lua", 1, true) == nil and exe:find("%.app", 1, true) == nil then
|
||||
return true
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
::continue::
|
||||
end
|
||||
return false
|
||||
end
|
||||
|
||||
@@ -189,41 +189,37 @@ function M.create(ctx)
|
||||
local source_len = #plain
|
||||
local cursor = 1
|
||||
for _, token in ipairs(payload.tokens or {}) do
|
||||
if type(token) ~= "table" or type(token.text) ~= "string" or token.text == "" then
|
||||
goto continue
|
||||
end
|
||||
if type(token) == "table" and type(token.text) == "string" and token.text ~= "" then
|
||||
local token_text = token.text
|
||||
local start_pos = nil
|
||||
local end_pos = nil
|
||||
|
||||
local token_text = token.text
|
||||
local start_pos = nil
|
||||
local end_pos = nil
|
||||
|
||||
if type(token.startPos) == "number" and type(token.endPos) == "number" then
|
||||
if token.startPos >= 0 and token.endPos >= token.startPos then
|
||||
local candidate_start = token.startPos + 1
|
||||
local candidate_stop = token.endPos
|
||||
if candidate_start >= 1 and candidate_stop <= source_len and candidate_stop >= candidate_start and plain:sub(candidate_start, candidate_stop) == token_text then
|
||||
start_pos = candidate_start
|
||||
end_pos = candidate_stop
|
||||
if type(token.startPos) == "number" and type(token.endPos) == "number" then
|
||||
if token.startPos >= 0 and token.endPos >= token.startPos then
|
||||
local candidate_start = token.startPos + 1
|
||||
local candidate_stop = token.endPos
|
||||
if candidate_start >= 1 and candidate_stop <= source_len and candidate_stop >= candidate_start and plain:sub(candidate_start, candidate_stop) == token_text then
|
||||
start_pos = candidate_start
|
||||
end_pos = candidate_stop
|
||||
end
|
||||
end
|
||||
end
|
||||
end
|
||||
|
||||
if not start_pos or not end_pos then
|
||||
local fallback_start, fallback_stop = plain:find(token_text, cursor, true)
|
||||
if not fallback_start then
|
||||
fallback_start, fallback_stop = plain:find(token_text, 1, true)
|
||||
if not start_pos or not end_pos then
|
||||
local fallback_start, fallback_stop = plain:find(token_text, cursor, true)
|
||||
if not fallback_start then
|
||||
fallback_start, fallback_stop = plain:find(token_text, 1, true)
|
||||
end
|
||||
start_pos, end_pos = fallback_start, fallback_stop
|
||||
end
|
||||
start_pos, end_pos = fallback_start, fallback_stop
|
||||
end
|
||||
|
||||
if start_pos and end_pos then
|
||||
if token.index == payload.hoveredTokenIndex then
|
||||
return start_pos, end_pos
|
||||
if start_pos and end_pos then
|
||||
if token.index == payload.hoveredTokenIndex then
|
||||
return start_pos, end_pos
|
||||
end
|
||||
cursor = end_pos + 1
|
||||
end
|
||||
cursor = end_pos + 1
|
||||
end
|
||||
|
||||
::continue::
|
||||
end
|
||||
|
||||
return nil
|
||||
|
||||
@@ -313,12 +313,14 @@ function M.create(ctx)
|
||||
|
||||
local previous_binding_names = state.session_binding_names
|
||||
local next_binding_names = {}
|
||||
state.session_binding_generation = (state.session_binding_generation or 0) + 1
|
||||
local generation = state.session_binding_generation
|
||||
|
||||
local timeout_ms = tonumber(artifact.numericSelectionTimeoutMs) or 3000
|
||||
for index, binding in ipairs(artifact.bindings) do
|
||||
local key_name = key_spec_to_mpv_binding(binding.key)
|
||||
if key_name then
|
||||
local name = "subminer-session-binding-" .. tostring(index)
|
||||
local name = "subminer-session-binding-" .. tostring(generation) .. "-" .. tostring(index)
|
||||
next_binding_names[#next_binding_names + 1] = name
|
||||
mp.add_forced_key_binding(key_name, name, function()
|
||||
handle_binding(binding, timeout_ms)
|
||||
|
||||
@@ -33,6 +33,7 @@ function M.new()
|
||||
auto_play_ready_timeout = nil,
|
||||
auto_play_ready_osd_timer = nil,
|
||||
suppress_ready_overlay_restore = false,
|
||||
session_binding_generation = 0,
|
||||
session_binding_names = {},
|
||||
session_numeric_binding_names = {},
|
||||
session_numeric_selection = nil,
|
||||
|
||||
@@ -139,6 +139,49 @@ test('writeChangelogArtifacts skips changelog prepend when release section alrea
|
||||
}
|
||||
});
|
||||
|
||||
test('writeStableReleaseArtifacts reuses the requested version and date for changelog, release notes, and docs-site output', async () => {
|
||||
const { writeStableReleaseArtifacts } = await loadModule();
|
||||
const workspace = createWorkspace('write-stable-release-artifacts');
|
||||
const projectRoot = path.join(workspace, 'SubMiner');
|
||||
|
||||
fs.mkdirSync(path.join(projectRoot, 'changes'), { recursive: true });
|
||||
fs.mkdirSync(path.join(projectRoot, 'docs-site'), { recursive: true });
|
||||
fs.writeFileSync(
|
||||
path.join(projectRoot, 'package.json'),
|
||||
JSON.stringify({ name: 'subminer', version: '0.4.1' }, null, 2),
|
||||
'utf8',
|
||||
);
|
||||
fs.writeFileSync(path.join(projectRoot, 'CHANGELOG.md'), '# Changelog\n', 'utf8');
|
||||
fs.writeFileSync(
|
||||
path.join(projectRoot, 'changes', '001.md'),
|
||||
['type: fixed', 'area: release', '', '- Reused explicit stable release date.'].join('\n'),
|
||||
'utf8',
|
||||
);
|
||||
|
||||
try {
|
||||
const result = writeStableReleaseArtifacts({
|
||||
cwd: projectRoot,
|
||||
version: '0.4.1',
|
||||
date: '2026-03-07',
|
||||
});
|
||||
|
||||
assert.deepEqual(result.outputPaths, [path.join(projectRoot, 'CHANGELOG.md')]);
|
||||
assert.equal(result.releaseNotesPath, path.join(projectRoot, 'release', 'release-notes.md'));
|
||||
assert.equal(result.docsChangelogPath, path.join(projectRoot, 'docs-site', 'changelog.md'));
|
||||
|
||||
const changelog = fs.readFileSync(path.join(projectRoot, 'CHANGELOG.md'), 'utf8');
|
||||
const docsChangelog = fs.readFileSync(
|
||||
path.join(projectRoot, 'docs-site', 'changelog.md'),
|
||||
'utf8',
|
||||
);
|
||||
|
||||
assert.match(changelog, /## v0\.4\.1 \(2026-03-07\)/);
|
||||
assert.match(docsChangelog, /## v0\.4\.1 \(2026-03-07\)/);
|
||||
} finally {
|
||||
fs.rmSync(workspace, { recursive: true, force: true });
|
||||
}
|
||||
});
|
||||
|
||||
test('verifyChangelogReadyForRelease ignores README but rejects pending fragments and missing version sections', async () => {
|
||||
const { verifyChangelogReadyForRelease } = await loadModule();
|
||||
const workspace = createWorkspace('verify-release');
|
||||
@@ -362,11 +405,11 @@ test('writePrereleaseNotesForVersion writes cumulative beta notes without mutati
|
||||
|
||||
const prereleaseNotes = fs.readFileSync(outputPath, 'utf8');
|
||||
assert.match(prereleaseNotes, /^> This is a prerelease build for testing\./m);
|
||||
assert.match(prereleaseNotes, /## Highlights\n### Added\n- Overlay: Added prerelease coverage\./);
|
||||
assert.match(
|
||||
prereleaseNotes,
|
||||
/### Fixed\n- Launcher: Fixed prerelease packaging checks\./,
|
||||
/## Highlights\n### Added\n- Overlay: Added prerelease coverage\./,
|
||||
);
|
||||
assert.match(prereleaseNotes, /### Fixed\n- Launcher: Fixed prerelease packaging checks\./);
|
||||
assert.match(prereleaseNotes, /## Installation\n\nSee the README and docs\/installation guide/);
|
||||
} finally {
|
||||
fs.rmSync(workspace, { recursive: true, force: true });
|
||||
|
||||
@@ -430,6 +430,21 @@ export function writeChangelogArtifacts(options?: ChangelogOptions): {
|
||||
};
|
||||
}
|
||||
|
||||
export function writeStableReleaseArtifacts(options?: ChangelogOptions): {
|
||||
deletedFragmentPaths: string[];
|
||||
docsChangelogPath: string;
|
||||
outputPaths: string[];
|
||||
releaseNotesPath: string;
|
||||
} {
|
||||
const changelogResult = writeChangelogArtifacts(options);
|
||||
const docsChangelogPath = generateDocsChangelog(options);
|
||||
|
||||
return {
|
||||
...changelogResult,
|
||||
docsChangelogPath,
|
||||
};
|
||||
}
|
||||
|
||||
export function verifyChangelogFragments(options?: ChangelogOptions): void {
|
||||
readChangeFragments(options?.cwd ?? process.cwd(), options?.deps);
|
||||
}
|
||||
@@ -726,6 +741,11 @@ function main(): void {
|
||||
return;
|
||||
}
|
||||
|
||||
if (command === 'build-release') {
|
||||
writeStableReleaseArtifacts(options);
|
||||
return;
|
||||
}
|
||||
|
||||
if (command === 'check') {
|
||||
verifyChangelogReadyForRelease(options);
|
||||
return;
|
||||
|
||||
141
scripts/test-plugin-lua-compat.lua
Normal file
141
scripts/test-plugin-lua-compat.lua
Normal file
@@ -0,0 +1,141 @@
|
||||
local MODULE_PATHS = {
|
||||
"plugin/subminer/hover.lua",
|
||||
"plugin/subminer/environment.lua",
|
||||
}
|
||||
|
||||
local LEGACY_PARSER_CANDIDATES = {
|
||||
"luajit",
|
||||
"lua5.1",
|
||||
"lua51",
|
||||
}
|
||||
|
||||
local function assert_true(condition, message)
|
||||
if condition then
|
||||
return
|
||||
end
|
||||
error(message or "assert_true failed")
|
||||
end
|
||||
|
||||
local function read_file(path)
|
||||
local file = assert(io.open(path, "r"), "failed to open " .. path)
|
||||
local content = file:read("*a")
|
||||
file:close()
|
||||
return content
|
||||
end
|
||||
|
||||
local function find_legacy_incompatible_continue(source)
|
||||
local goto_start, goto_end = source:find("%f[%a]goto%s+continue%f[%A]")
|
||||
if goto_start then
|
||||
return "goto continue", goto_start, goto_end
|
||||
end
|
||||
|
||||
local label_start, label_end = source:find("::continue::", 1, true)
|
||||
if label_start then
|
||||
return "::continue::", label_start, label_end
|
||||
end
|
||||
|
||||
return nil
|
||||
end
|
||||
|
||||
local function assert_no_legacy_incompatible_continue(path)
|
||||
local source = read_file(path)
|
||||
local match = find_legacy_incompatible_continue(source)
|
||||
assert_true(match == nil, path .. " still contains legacy-incompatible continue control flow: " .. tostring(match))
|
||||
end
|
||||
|
||||
local function assert_loadfile_ok(path)
|
||||
local chunk, err = loadfile(path)
|
||||
assert_true(chunk ~= nil, "loadfile failed for " .. path .. ": " .. tostring(err))
|
||||
end
|
||||
|
||||
local function normalize_execute_result(ok, why, code)
|
||||
if type(ok) == "number" then
|
||||
return ok == 0, ok
|
||||
end
|
||||
if type(ok) == "boolean" then
|
||||
if ok then
|
||||
return true, code or 0
|
||||
end
|
||||
return false, code or 1
|
||||
end
|
||||
return false, code or 1
|
||||
end
|
||||
|
||||
local function command_succeeds(command)
|
||||
local ok, why, code = os.execute(command)
|
||||
return normalize_execute_result(ok, why, code)
|
||||
end
|
||||
|
||||
local function command_exists(command)
|
||||
local shell = package.config:sub(1, 1) == "\\" and "where " or "command -v "
|
||||
local redirect = package.config:sub(1, 1) == "\\" and " >NUL 2>NUL" or " >/dev/null 2>&1"
|
||||
local escaped = command
|
||||
local success = command_succeeds(shell .. escaped .. redirect)
|
||||
return success
|
||||
end
|
||||
|
||||
local function find_legacy_parser()
|
||||
for _, command in ipairs(LEGACY_PARSER_CANDIDATES) do
|
||||
if command_exists(command) then
|
||||
return command
|
||||
end
|
||||
end
|
||||
return nil
|
||||
end
|
||||
|
||||
local function shell_redirect()
|
||||
if package.config:sub(1, 1) == "\\" then
|
||||
return " >NUL 2>NUL"
|
||||
end
|
||||
return " >/dev/null 2>&1"
|
||||
end
|
||||
|
||||
local function assert_parser_accepts_file(parser, path)
|
||||
local command = string.format("%s -e %q%s", parser, "assert(loadfile(" .. string.format("%q", path) .. "))", shell_redirect())
|
||||
local success = command_succeeds(command)
|
||||
assert_true(success, parser .. " failed to parse " .. path)
|
||||
end
|
||||
|
||||
local function assert_parser_rejects_legacy_fixture(parser)
|
||||
local legacy_fixture = [[
|
||||
local tokens = {}
|
||||
for _, token in ipairs(tokens or {}) do
|
||||
if type(token) ~= "table" then
|
||||
goto continue
|
||||
end
|
||||
::continue::
|
||||
end
|
||||
]]
|
||||
local command = string.format("%s -e %q%s", parser, legacy_fixture, shell_redirect())
|
||||
local success = command_succeeds(command)
|
||||
assert_true(not success, parser .. " unexpectedly accepted legacy goto/label continue fixture")
|
||||
end
|
||||
|
||||
do
|
||||
local legacy_fixture = [[
|
||||
for _, token in ipairs(tokens or {}) do
|
||||
if type(token) ~= "table" then
|
||||
goto continue
|
||||
end
|
||||
::continue::
|
||||
end
|
||||
]]
|
||||
local match = find_legacy_incompatible_continue(legacy_fixture)
|
||||
assert_true(match ~= nil, "legacy fixture should trigger incompatible continue detector")
|
||||
end
|
||||
|
||||
for _, path in ipairs(MODULE_PATHS) do
|
||||
assert_no_legacy_incompatible_continue(path)
|
||||
assert_loadfile_ok(path)
|
||||
end
|
||||
|
||||
local parser = find_legacy_parser()
|
||||
if parser then
|
||||
assert_parser_rejects_legacy_fixture(parser)
|
||||
for _, path in ipairs(MODULE_PATHS) do
|
||||
assert_parser_accepts_file(parser, path)
|
||||
end
|
||||
print("plugin lua compatibility regression tests: OK (" .. parser .. ")")
|
||||
else
|
||||
print("plugin lua compatibility regression tests: OK (legacy parser unavailable; structural checks only)")
|
||||
end
|
||||
@@ -107,11 +107,7 @@ test('parseArgs captures session action forwarding flags', () => {
|
||||
});
|
||||
|
||||
test('parseArgs ignores non-positive numeric session action counts', () => {
|
||||
const args = parseArgs([
|
||||
'--copy-subtitle-count=0',
|
||||
'--mine-sentence-count',
|
||||
'-1',
|
||||
]);
|
||||
const args = parseArgs(['--copy-subtitle-count=0', '--mine-sentence-count', '-1']);
|
||||
|
||||
assert.equal(args.copySubtitleCount, undefined);
|
||||
assert.equal(args.mineSentenceCount, undefined);
|
||||
@@ -221,16 +217,16 @@ test('hasExplicitCommand and shouldStartApp preserve command intent', () => {
|
||||
assert.equal(hasExplicitCommand(toggleStatsOverlay), true);
|
||||
assert.equal(shouldStartApp(toggleStatsOverlay), true);
|
||||
|
||||
const cycleRuntimeOption = parseArgs([
|
||||
'--cycle-runtime-option',
|
||||
'anki.autoUpdateNewCards:next',
|
||||
]);
|
||||
const cycleRuntimeOption = parseArgs(['--cycle-runtime-option', 'anki.autoUpdateNewCards:next']);
|
||||
assert.equal(cycleRuntimeOption.cycleRuntimeOptionId, 'anki.autoUpdateNewCards');
|
||||
assert.equal(cycleRuntimeOption.cycleRuntimeOptionDirection, 1);
|
||||
assert.equal(hasExplicitCommand(cycleRuntimeOption), true);
|
||||
assert.equal(shouldStartApp(cycleRuntimeOption), true);
|
||||
assert.equal(commandNeedsOverlayRuntime(cycleRuntimeOption), true);
|
||||
|
||||
const toggleStatsOverlayRuntime = parseArgs(['--toggle-stats-overlay']);
|
||||
assert.equal(commandNeedsOverlayRuntime(toggleStatsOverlayRuntime), true);
|
||||
|
||||
const dictionary = parseArgs(['--dictionary']);
|
||||
assert.equal(dictionary.dictionary, true);
|
||||
assert.equal(hasExplicitCommand(dictionary), true);
|
||||
|
||||
@@ -173,7 +173,10 @@ export function parseArgs(argv: string[]): CliArgs {
|
||||
const separatorIndex = value.lastIndexOf(':');
|
||||
if (separatorIndex <= 0 || separatorIndex === value.length - 1) return null;
|
||||
const id = value.slice(0, separatorIndex).trim();
|
||||
const rawDirection = value.slice(separatorIndex + 1).trim().toLowerCase();
|
||||
const rawDirection = value
|
||||
.slice(separatorIndex + 1)
|
||||
.trim()
|
||||
.toLowerCase();
|
||||
if (!id) return null;
|
||||
if (rawDirection === 'next' || rawDirection === '1') {
|
||||
return { id, direction: 1 };
|
||||
@@ -686,6 +689,7 @@ export function commandNeedsOverlayRuntime(args: CliArgs): boolean {
|
||||
args.mineSentenceMultiple ||
|
||||
args.updateLastCardFromClipboard ||
|
||||
args.toggleSecondarySub ||
|
||||
args.toggleStatsOverlay ||
|
||||
args.toggleSubtitleSidebar ||
|
||||
args.triggerFieldGrouping ||
|
||||
args.triggerSubsync ||
|
||||
|
||||
@@ -50,6 +50,7 @@ test('loads defaults when config is missing', () => {
|
||||
assert.equal(config.startupWarmups.yomitanExtension, true);
|
||||
assert.equal(config.startupWarmups.subtitleDictionaries, true);
|
||||
assert.equal(config.startupWarmups.jellyfinRemoteSession, true);
|
||||
assert.equal(config.shortcuts.toggleSubtitleSidebar, 'Backslash');
|
||||
assert.equal(config.discordPresence.enabled, true);
|
||||
assert.equal(config.discordPresence.updateIntervalMs, 3_000);
|
||||
assert.equal(config.subtitleStyle.backgroundColor, 'rgb(30, 32, 48, 0.88)');
|
||||
|
||||
@@ -91,7 +91,7 @@ export const CORE_DEFAULT_CONFIG: Pick<
|
||||
openSessionHelp: 'CommandOrControl+Shift+H',
|
||||
openControllerSelect: 'Alt+C',
|
||||
openControllerDebug: 'Alt+Shift+C',
|
||||
toggleSubtitleSidebar: '\\',
|
||||
toggleSubtitleSidebar: 'Backslash',
|
||||
},
|
||||
secondarySub: {
|
||||
secondarySubLanguages: [],
|
||||
|
||||
@@ -166,14 +166,20 @@ const TRENDS_DASHBOARD = {
|
||||
ratios: {
|
||||
lookupsPerHundred: [{ label: 'Mar 1', value: 5 }],
|
||||
},
|
||||
animePerDay: {
|
||||
episodes: [{ epochDay: 20_000, animeTitle: 'Little Witch Academia', value: 1 }],
|
||||
watchTime: [{ epochDay: 20_000, animeTitle: 'Little Witch Academia', value: 25 }],
|
||||
cards: [{ epochDay: 20_000, animeTitle: 'Little Witch Academia', value: 5 }],
|
||||
words: [{ epochDay: 20_000, animeTitle: 'Little Witch Academia', value: 300 }],
|
||||
lookups: [{ epochDay: 20_000, animeTitle: 'Little Witch Academia', value: 15 }],
|
||||
lookupsPerHundred: [{ epochDay: 20_000, animeTitle: 'Little Witch Academia', value: 5 }],
|
||||
},
|
||||
librarySummary: [
|
||||
{
|
||||
title: 'Little Witch Academia',
|
||||
watchTimeMin: 25,
|
||||
videos: 1,
|
||||
sessions: 1,
|
||||
cards: 5,
|
||||
words: 300,
|
||||
lookups: 15,
|
||||
lookupsPerHundred: 5,
|
||||
firstWatched: 20_000,
|
||||
lastWatched: 20_000,
|
||||
},
|
||||
],
|
||||
animeCumulative: {
|
||||
watchTime: [{ epochDay: 20_000, animeTitle: 'Little Witch Academia', value: 25 }],
|
||||
episodes: [{ epochDay: 20_000, animeTitle: 'Little Witch Academia', value: 1 }],
|
||||
@@ -598,7 +604,23 @@ describe('stats server API routes', () => {
|
||||
const body = await res.json();
|
||||
assert.deepEqual(seenArgs, ['90d', 'month']);
|
||||
assert.deepEqual(body.activity.watchTime, TRENDS_DASHBOARD.activity.watchTime);
|
||||
assert.deepEqual(body.animePerDay.watchTime, TRENDS_DASHBOARD.animePerDay.watchTime);
|
||||
assert.deepEqual(body.librarySummary, TRENDS_DASHBOARD.librarySummary);
|
||||
});
|
||||
|
||||
it('GET /api/stats/trends/dashboard accepts 365d range', async () => {
|
||||
let seenArgs: unknown[] = [];
|
||||
const app = createStatsApp(
|
||||
createMockTracker({
|
||||
getTrendsDashboard: async (...args: unknown[]) => {
|
||||
seenArgs = args;
|
||||
return TRENDS_DASHBOARD;
|
||||
},
|
||||
}),
|
||||
);
|
||||
|
||||
const res = await app.request('/api/stats/trends/dashboard?range=365d&groupBy=month');
|
||||
assert.equal(res.status, 200);
|
||||
assert.deepEqual(seenArgs, ['365d', 'month']);
|
||||
});
|
||||
|
||||
it('GET /api/stats/trends/dashboard falls back to safe defaults for invalid params', async () => {
|
||||
|
||||
@@ -75,9 +75,7 @@ test('runAppReadyRuntime starts websocket in auto mode when plugin missing', asy
|
||||
calls.indexOf('setVisibleOverlayVisible:true') < calls.indexOf('initializeOverlayRuntime'),
|
||||
);
|
||||
assert.ok(calls.includes('startBackgroundWarmups'));
|
||||
assert.ok(
|
||||
calls.includes('log:Runtime ready: immersion tracker startup requested.'),
|
||||
);
|
||||
assert.ok(calls.includes('log:Runtime ready: immersion tracker startup requested.'));
|
||||
});
|
||||
|
||||
test('runAppReadyRuntime starts texthooker on startup when enabled in config', async () => {
|
||||
@@ -108,8 +106,8 @@ test('runAppReadyRuntime creates immersion tracker during heavy startup', async
|
||||
|
||||
await runAppReadyRuntime(deps);
|
||||
|
||||
assert.ok(calls.includes('createImmersionTracker'));
|
||||
assert.ok(calls.indexOf('createImmersionTracker') < calls.indexOf('handleInitialArgs'));
|
||||
assert.equal(calls.includes('createImmersionTracker'), false);
|
||||
assert.ok(calls.includes('log:Runtime ready: immersion tracker startup requested.'));
|
||||
});
|
||||
|
||||
test('runAppReadyRuntime keeps annotation websocket enabled when regular websocket auto-skips', async () => {
|
||||
|
||||
@@ -277,12 +277,7 @@ export function handleCliCommand(
|
||||
logLabel: string,
|
||||
osdLabel: string,
|
||||
): void => {
|
||||
runAsyncWithOsd(
|
||||
() => deps.dispatchSessionAction?.(request) ?? Promise.resolve(),
|
||||
deps,
|
||||
logLabel,
|
||||
osdLabel,
|
||||
);
|
||||
runAsyncWithOsd(() => deps.dispatchSessionAction(request), deps, logLabel, osdLabel);
|
||||
};
|
||||
|
||||
if (args.logLevel) {
|
||||
|
||||
@@ -488,7 +488,7 @@ export class ImmersionTrackerService {
|
||||
}
|
||||
|
||||
async getTrendsDashboard(
|
||||
range: '7d' | '30d' | '90d' | 'all' = '30d',
|
||||
range: '7d' | '30d' | '90d' | '365d' | 'all' = '30d',
|
||||
groupBy: 'day' | 'month' = 'day',
|
||||
): Promise<unknown> {
|
||||
return getTrendsDashboard(this.db, range, groupBy);
|
||||
|
||||
@@ -687,7 +687,7 @@ test('getTrendsDashboard returns chart-ready aggregated series', () => {
|
||||
assert.equal(dashboard.progress.watchTime[1]?.value, 75);
|
||||
assert.equal(dashboard.progress.lookups[1]?.value, 18);
|
||||
assert.equal(dashboard.ratios.lookupsPerHundred[0]?.value, +((8 / 120) * 100).toFixed(1));
|
||||
assert.equal(dashboard.animePerDay.watchTime[0]?.animeTitle, 'Trend Dashboard Anime');
|
||||
assert.equal(dashboard.librarySummary[0]?.title, 'Trend Dashboard Anime');
|
||||
assert.equal(dashboard.animeCumulative.watchTime[1]?.value, 75);
|
||||
assert.equal(
|
||||
dashboard.patterns.watchTimeByDayOfWeek.reduce((sum, point) => sum + point.value, 0),
|
||||
@@ -835,6 +835,65 @@ test('getTrendsDashboard keeps local-midnight session buckets separate', () => {
|
||||
}
|
||||
});
|
||||
|
||||
test('getTrendsDashboard supports 365d range and caps day buckets at 365', () => {
|
||||
const dbPath = makeDbPath();
|
||||
const db = new Database(dbPath);
|
||||
withMockNowMs('1772395200000', () => {
|
||||
try {
|
||||
ensureSchema(db);
|
||||
|
||||
const videoId = getOrCreateVideoRecord(db, 'local:/tmp/365d-trends.mkv', {
|
||||
canonicalTitle: '365d Trends',
|
||||
sourcePath: '/tmp/365d-trends.mkv',
|
||||
sourceUrl: null,
|
||||
sourceType: SOURCE_TYPE_LOCAL,
|
||||
});
|
||||
const animeId = getOrCreateAnimeRecord(db, {
|
||||
parsedTitle: '365d Trends',
|
||||
canonicalTitle: '365d Trends',
|
||||
anilistId: null,
|
||||
titleRomaji: null,
|
||||
titleEnglish: null,
|
||||
titleNative: null,
|
||||
metadataJson: null,
|
||||
});
|
||||
linkVideoToAnimeRecord(db, videoId, {
|
||||
animeId,
|
||||
parsedBasename: '365d-trends.mkv',
|
||||
parsedTitle: '365d Trends',
|
||||
parsedSeason: 1,
|
||||
parsedEpisode: 1,
|
||||
parserSource: 'test',
|
||||
parserConfidence: 1,
|
||||
parseMetadataJson: null,
|
||||
});
|
||||
|
||||
const insertDailyRollup = db.prepare(
|
||||
`
|
||||
INSERT INTO imm_daily_rollups (
|
||||
rollup_day, video_id, total_sessions, total_active_min, total_lines_seen,
|
||||
total_tokens_seen, total_cards, CREATED_DATE, LAST_UPDATE_DATE
|
||||
) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?)
|
||||
`,
|
||||
);
|
||||
// Seed 400 distinct rollup days so we can prove the 365d range caps at 365.
|
||||
const latestRollupDay = 20513;
|
||||
const createdAtMs = '1772395200000';
|
||||
for (let offset = 0; offset < 400; offset += 1) {
|
||||
const rollupDay = latestRollupDay - offset;
|
||||
insertDailyRollup.run(rollupDay, videoId, 1, 30, 4, 100, 2, createdAtMs, createdAtMs);
|
||||
}
|
||||
|
||||
const dashboard = getTrendsDashboard(db, '365d', 'day');
|
||||
|
||||
assert.equal(dashboard.activity.watchTime.length, 365);
|
||||
} finally {
|
||||
db.close();
|
||||
cleanupDbPath(dbPath);
|
||||
}
|
||||
});
|
||||
});
|
||||
|
||||
test('getTrendsDashboard month grouping spans every touched calendar month and keeps progress monthly', () => {
|
||||
const dbPath = makeDbPath();
|
||||
const db = new Database(dbPath);
|
||||
@@ -1879,6 +1938,50 @@ test('getSessionEvents returns events ordered by ts_ms ascending', () => {
|
||||
}
|
||||
});
|
||||
|
||||
test('getSessionEvents round-trips wall-clock timestamps written through event inserts', () => {
|
||||
const dbPath = makeDbPath();
|
||||
const db = new Database(dbPath);
|
||||
|
||||
try {
|
||||
ensureSchema(db);
|
||||
const stmts = createTrackerPreparedStatements(db);
|
||||
|
||||
const videoId = getOrCreateVideoRecord(db, 'local:/tmp/events-wall-clock.mkv', {
|
||||
canonicalTitle: 'Events Wall Clock',
|
||||
sourcePath: '/tmp/events-wall-clock.mkv',
|
||||
sourceUrl: null,
|
||||
sourceType: SOURCE_TYPE_LOCAL,
|
||||
});
|
||||
|
||||
const startedAtMs = Date.now() - 10_000;
|
||||
const eventTsMs = startedAtMs + 5_000;
|
||||
const { sessionId } = startSessionRecord(db, videoId, startedAtMs);
|
||||
|
||||
stmts.eventInsertStmt.run(
|
||||
sessionId,
|
||||
toDbTimestamp(eventTsMs),
|
||||
EVENT_SUBTITLE_LINE,
|
||||
0,
|
||||
0,
|
||||
500,
|
||||
1,
|
||||
0,
|
||||
'{"line":"wall-clock"}',
|
||||
toDbTimestamp(eventTsMs),
|
||||
toDbTimestamp(eventTsMs),
|
||||
);
|
||||
|
||||
const events = getSessionEvents(db, sessionId, 10);
|
||||
|
||||
assert.equal(events.length, 1);
|
||||
assert.equal(events[0]?.tsMs, eventTsMs);
|
||||
assert.equal(events[0]?.payload, '{"line":"wall-clock"}');
|
||||
} finally {
|
||||
db.close();
|
||||
cleanupDbPath(dbPath);
|
||||
}
|
||||
});
|
||||
|
||||
test('getSessionEvents returns empty array for session with no events', () => {
|
||||
const dbPath = makeDbPath();
|
||||
const db = new Database(dbPath);
|
||||
@@ -3666,3 +3769,206 @@ test('deleteSession removes zero-session media from library and trends', () => {
|
||||
cleanupDbPath(dbPath);
|
||||
}
|
||||
});
|
||||
|
||||
test('getTrendsDashboard builds librarySummary with per-title aggregates', () => {
|
||||
const dbPath = makeDbPath();
|
||||
const db = new Database(dbPath);
|
||||
|
||||
try {
|
||||
ensureSchema(db);
|
||||
const stmts = createTrackerPreparedStatements(db);
|
||||
|
||||
const videoId = getOrCreateVideoRecord(db, 'local:/tmp/library-summary-test.mkv', {
|
||||
canonicalTitle: 'Library Summary Test',
|
||||
sourcePath: '/tmp/library-summary-test.mkv',
|
||||
sourceUrl: null,
|
||||
sourceType: SOURCE_TYPE_LOCAL,
|
||||
});
|
||||
const animeId = getOrCreateAnimeRecord(db, {
|
||||
parsedTitle: 'Summary Anime',
|
||||
canonicalTitle: 'Summary Anime',
|
||||
anilistId: null,
|
||||
titleRomaji: null,
|
||||
titleEnglish: null,
|
||||
titleNative: null,
|
||||
metadataJson: null,
|
||||
});
|
||||
linkVideoToAnimeRecord(db, videoId, {
|
||||
animeId,
|
||||
parsedBasename: 'library-summary-test.mkv',
|
||||
parsedTitle: 'Summary Anime',
|
||||
parsedSeason: 1,
|
||||
parsedEpisode: 1,
|
||||
parserSource: 'test',
|
||||
parserConfidence: 1,
|
||||
parseMetadataJson: null,
|
||||
});
|
||||
|
||||
const dayOneStart = 1_700_000_000_000;
|
||||
const dayTwoStart = dayOneStart + 86_400_000;
|
||||
|
||||
const sessionOne = startSessionRecord(db, videoId, dayOneStart);
|
||||
const sessionTwo = startSessionRecord(db, videoId, dayTwoStart);
|
||||
|
||||
for (const [sessionId, startedAtMs, activeMs, cards, tokens, lookups] of [
|
||||
[sessionOne.sessionId, dayOneStart, 30 * 60_000, 2, 120, 8],
|
||||
[sessionTwo.sessionId, dayTwoStart, 45 * 60_000, 3, 140, 10],
|
||||
] as const) {
|
||||
stmts.telemetryInsertStmt.run(
|
||||
sessionId,
|
||||
`${startedAtMs + 60_000}`,
|
||||
activeMs,
|
||||
activeMs,
|
||||
10,
|
||||
tokens,
|
||||
cards,
|
||||
0,
|
||||
0,
|
||||
lookups,
|
||||
0,
|
||||
0,
|
||||
0,
|
||||
0,
|
||||
`${startedAtMs + 60_000}`,
|
||||
`${startedAtMs + 60_000}`,
|
||||
);
|
||||
|
||||
db.prepare(
|
||||
`
|
||||
UPDATE imm_sessions
|
||||
SET ended_at_ms = ?, total_watched_ms = ?, active_watched_ms = ?,
|
||||
lines_seen = ?, tokens_seen = ?, cards_mined = ?, yomitan_lookup_count = ?
|
||||
WHERE session_id = ?
|
||||
`,
|
||||
).run(`${startedAtMs + activeMs}`, activeMs, activeMs, 10, tokens, cards, lookups, sessionId);
|
||||
}
|
||||
|
||||
for (const [day, active, tokens, cards] of [
|
||||
[Math.floor(dayOneStart / 86_400_000), 30, 120, 2],
|
||||
[Math.floor(dayTwoStart / 86_400_000), 45, 140, 3],
|
||||
] as const) {
|
||||
db.prepare(
|
||||
`
|
||||
INSERT INTO imm_daily_rollups (
|
||||
rollup_day, video_id, total_sessions, total_active_min, total_lines_seen,
|
||||
total_tokens_seen, total_cards
|
||||
) VALUES (?, ?, ?, ?, ?, ?, ?)
|
||||
`,
|
||||
).run(day, videoId, 1, active, 10, tokens, cards);
|
||||
}
|
||||
|
||||
const dashboard = getTrendsDashboard(db, 'all', 'day');
|
||||
|
||||
assert.equal(dashboard.librarySummary.length, 1);
|
||||
const row = dashboard.librarySummary[0]!;
|
||||
assert.equal(row.title, 'Summary Anime');
|
||||
assert.equal(row.watchTimeMin, 75);
|
||||
assert.equal(row.videos, 1);
|
||||
assert.equal(row.sessions, 2);
|
||||
assert.equal(row.cards, 5);
|
||||
assert.equal(row.words, 260);
|
||||
assert.equal(row.lookups, 18);
|
||||
assert.equal(row.lookupsPerHundred, +((18 / 260) * 100).toFixed(1));
|
||||
assert.equal(row.firstWatched, Math.floor(dayOneStart / 86_400_000));
|
||||
assert.equal(row.lastWatched, Math.floor(dayTwoStart / 86_400_000));
|
||||
} finally {
|
||||
db.close();
|
||||
cleanupDbPath(dbPath);
|
||||
}
|
||||
});
|
||||
|
||||
test('getTrendsDashboard librarySummary returns null lookupsPerHundred when words is zero', () => {
|
||||
const dbPath = makeDbPath();
|
||||
const db = new Database(dbPath);
|
||||
|
||||
try {
|
||||
ensureSchema(db);
|
||||
const stmts = createTrackerPreparedStatements(db);
|
||||
|
||||
const videoId = getOrCreateVideoRecord(db, 'local:/tmp/lib-summary-null.mkv', {
|
||||
canonicalTitle: 'Null Lookups Title',
|
||||
sourcePath: '/tmp/lib-summary-null.mkv',
|
||||
sourceUrl: null,
|
||||
sourceType: SOURCE_TYPE_LOCAL,
|
||||
});
|
||||
const animeId = getOrCreateAnimeRecord(db, {
|
||||
parsedTitle: 'Null Lookups Anime',
|
||||
canonicalTitle: 'Null Lookups Anime',
|
||||
anilistId: null,
|
||||
titleRomaji: null,
|
||||
titleEnglish: null,
|
||||
titleNative: null,
|
||||
metadataJson: null,
|
||||
});
|
||||
linkVideoToAnimeRecord(db, videoId, {
|
||||
animeId,
|
||||
parsedBasename: 'lib-summary-null.mkv',
|
||||
parsedTitle: 'Null Lookups Anime',
|
||||
parsedSeason: 1,
|
||||
parsedEpisode: 1,
|
||||
parserSource: 'test',
|
||||
parserConfidence: 1,
|
||||
parseMetadataJson: null,
|
||||
});
|
||||
|
||||
const startMs = 1_700_000_000_000;
|
||||
const session = startSessionRecord(db, videoId, startMs);
|
||||
stmts.telemetryInsertStmt.run(
|
||||
session.sessionId,
|
||||
`${startMs + 60_000}`,
|
||||
20 * 60_000,
|
||||
20 * 60_000,
|
||||
5,
|
||||
0,
|
||||
0,
|
||||
0,
|
||||
0,
|
||||
0,
|
||||
0,
|
||||
0,
|
||||
0,
|
||||
0,
|
||||
`${startMs + 60_000}`,
|
||||
`${startMs + 60_000}`,
|
||||
);
|
||||
db.prepare(
|
||||
`
|
||||
UPDATE imm_sessions
|
||||
SET ended_at_ms = ?, total_watched_ms = ?, active_watched_ms = ?,
|
||||
lines_seen = ?, tokens_seen = ?, cards_mined = ?, yomitan_lookup_count = ?
|
||||
WHERE session_id = ?
|
||||
`,
|
||||
).run(`${startMs + 20 * 60_000}`, 20 * 60_000, 20 * 60_000, 5, 0, 0, 0, session.sessionId);
|
||||
|
||||
db.prepare(
|
||||
`
|
||||
INSERT INTO imm_daily_rollups (
|
||||
rollup_day, video_id, total_sessions, total_active_min, total_lines_seen,
|
||||
total_tokens_seen, total_cards
|
||||
) VALUES (?, ?, ?, ?, ?, ?, ?)
|
||||
`,
|
||||
).run(Math.floor(startMs / 86_400_000), videoId, 1, 20, 5, 0, 0);
|
||||
|
||||
const dashboard = getTrendsDashboard(db, 'all', 'day');
|
||||
assert.equal(dashboard.librarySummary.length, 1);
|
||||
assert.equal(dashboard.librarySummary[0]!.lookupsPerHundred, null);
|
||||
assert.equal(dashboard.librarySummary[0]!.words, 0);
|
||||
} finally {
|
||||
db.close();
|
||||
cleanupDbPath(dbPath);
|
||||
}
|
||||
});
|
||||
|
||||
test('getTrendsDashboard librarySummary is empty when no rollups exist', () => {
|
||||
const dbPath = makeDbPath();
|
||||
const db = new Database(dbPath);
|
||||
|
||||
try {
|
||||
ensureSchema(db);
|
||||
const dashboard = getTrendsDashboard(db, 'all', 'day');
|
||||
assert.deepEqual(dashboard.librarySummary, []);
|
||||
} finally {
|
||||
db.close();
|
||||
cleanupDbPath(dbPath);
|
||||
}
|
||||
});
|
||||
|
||||
@@ -66,7 +66,7 @@ export function pruneRawRetention(
|
||||
const deletedSessionEvents = Number.isFinite(policy.eventsRetentionMs)
|
||||
? (
|
||||
db
|
||||
.prepare(`DELETE FROM imm_session_events WHERE ts_ms < ?`)
|
||||
.prepare(`DELETE FROM imm_session_events WHERE CAST(ts_ms AS REAL) < CAST(? AS REAL)`)
|
||||
.run(resolveCutoff(policy.eventsRetentionMs, policy.eventsRetentionDays)) as {
|
||||
changes: number;
|
||||
}
|
||||
|
||||
@@ -133,7 +133,7 @@ export function getSessionEvents(
|
||||
if (!eventTypes || eventTypes.length === 0) {
|
||||
const stmt = db.prepare(`
|
||||
SELECT event_type AS eventType, ts_ms AS tsMs, payload_json AS payload
|
||||
FROM imm_session_events WHERE session_id = ? ORDER BY ts_ms ASC LIMIT ?
|
||||
FROM imm_session_events WHERE session_id = ? ORDER BY CAST(ts_ms AS REAL) ASC LIMIT ?
|
||||
`);
|
||||
const rows = stmt.all(sessionId, limit) as Array<SessionEventRow & { tsMs: number | string }>;
|
||||
return rows.map((row) => ({
|
||||
@@ -147,7 +147,7 @@ export function getSessionEvents(
|
||||
SELECT event_type AS eventType, ts_ms AS tsMs, payload_json AS payload
|
||||
FROM imm_session_events
|
||||
WHERE session_id = ? AND event_type IN (${placeholders})
|
||||
ORDER BY ts_ms ASC
|
||||
ORDER BY CAST(ts_ms AS REAL) ASC
|
||||
LIMIT ?
|
||||
`);
|
||||
const rows = stmt.all(sessionId, ...eventTypes, limit) as Array<
|
||||
|
||||
@@ -602,7 +602,7 @@ export function getEpisodeCardEvents(db: DatabaseSync, videoId: number): Episode
|
||||
FROM imm_session_events e
|
||||
JOIN imm_sessions s ON s.session_id = e.session_id
|
||||
WHERE s.video_id = ? AND e.event_type = 4
|
||||
ORDER BY e.ts_ms DESC
|
||||
ORDER BY CAST(e.ts_ms AS REAL) DESC
|
||||
`,
|
||||
)
|
||||
.all(videoId) as Array<{
|
||||
|
||||
@@ -345,7 +345,11 @@ export function fromDbTimestamp(ms: number | bigint | string | null | undefined)
|
||||
if (typeof ms === 'bigint') {
|
||||
return Number(ms);
|
||||
}
|
||||
return Number(ms);
|
||||
const normalized = normalizeTimestampString(ms);
|
||||
if (/^-?\d+$/.test(normalized)) {
|
||||
return Number(BigInt(normalized));
|
||||
}
|
||||
return Math.trunc(Number.parseFloat(normalized));
|
||||
}
|
||||
|
||||
function getNumericCalendarValue(
|
||||
|
||||
@@ -13,7 +13,7 @@ import {
|
||||
} from './query-shared';
|
||||
import { getDailyRollups, getMonthlyRollups } from './query-sessions';
|
||||
|
||||
type TrendRange = '7d' | '30d' | '90d' | 'all';
|
||||
type TrendRange = '7d' | '30d' | '90d' | '365d' | 'all';
|
||||
type TrendGroupBy = 'day' | 'month';
|
||||
|
||||
interface TrendChartPoint {
|
||||
@@ -27,6 +27,19 @@ interface TrendPerAnimePoint {
|
||||
value: number;
|
||||
}
|
||||
|
||||
export interface LibrarySummaryRow {
|
||||
title: string;
|
||||
watchTimeMin: number;
|
||||
videos: number;
|
||||
sessions: number;
|
||||
cards: number;
|
||||
words: number;
|
||||
lookups: number;
|
||||
lookupsPerHundred: number | null;
|
||||
firstWatched: number;
|
||||
lastWatched: number;
|
||||
}
|
||||
|
||||
interface TrendSessionMetricRow {
|
||||
startedAtMs: number;
|
||||
epochDay: number;
|
||||
@@ -61,14 +74,6 @@ export interface TrendsDashboardQueryResult {
|
||||
ratios: {
|
||||
lookupsPerHundred: TrendChartPoint[];
|
||||
};
|
||||
animePerDay: {
|
||||
episodes: TrendPerAnimePoint[];
|
||||
watchTime: TrendPerAnimePoint[];
|
||||
cards: TrendPerAnimePoint[];
|
||||
words: TrendPerAnimePoint[];
|
||||
lookups: TrendPerAnimePoint[];
|
||||
lookupsPerHundred: TrendPerAnimePoint[];
|
||||
};
|
||||
animeCumulative: {
|
||||
watchTime: TrendPerAnimePoint[];
|
||||
episodes: TrendPerAnimePoint[];
|
||||
@@ -79,12 +84,14 @@ export interface TrendsDashboardQueryResult {
|
||||
watchTimeByDayOfWeek: TrendChartPoint[];
|
||||
watchTimeByHour: TrendChartPoint[];
|
||||
};
|
||||
librarySummary: LibrarySummaryRow[];
|
||||
}
|
||||
|
||||
const TREND_DAY_LIMITS: Record<Exclude<TrendRange, 'all'>, number> = {
|
||||
'7d': 7,
|
||||
'30d': 30,
|
||||
'90d': 90,
|
||||
'365d': 365,
|
||||
};
|
||||
|
||||
const MONTH_NAMES = [
|
||||
@@ -300,61 +307,6 @@ function buildLookupsPerHundredWords(
|
||||
});
|
||||
}
|
||||
|
||||
function buildPerAnimeFromSessions(
|
||||
sessions: TrendSessionMetricRow[],
|
||||
getValue: (session: TrendSessionMetricRow) => number,
|
||||
): TrendPerAnimePoint[] {
|
||||
const byAnime = new Map<string, Map<number, number>>();
|
||||
|
||||
for (const session of sessions) {
|
||||
const animeTitle = resolveTrendAnimeTitle(session);
|
||||
const epochDay = session.epochDay;
|
||||
const dayMap = byAnime.get(animeTitle) ?? new Map();
|
||||
dayMap.set(epochDay, (dayMap.get(epochDay) ?? 0) + getValue(session));
|
||||
byAnime.set(animeTitle, dayMap);
|
||||
}
|
||||
|
||||
const result: TrendPerAnimePoint[] = [];
|
||||
for (const [animeTitle, dayMap] of byAnime) {
|
||||
for (const [epochDay, value] of dayMap) {
|
||||
result.push({ epochDay, animeTitle, value });
|
||||
}
|
||||
}
|
||||
return result;
|
||||
}
|
||||
|
||||
function buildLookupsPerHundredPerAnime(sessions: TrendSessionMetricRow[]): TrendPerAnimePoint[] {
|
||||
const lookups = new Map<string, Map<number, number>>();
|
||||
const words = new Map<string, Map<number, number>>();
|
||||
|
||||
for (const session of sessions) {
|
||||
const animeTitle = resolveTrendAnimeTitle(session);
|
||||
const epochDay = session.epochDay;
|
||||
|
||||
const lookupMap = lookups.get(animeTitle) ?? new Map();
|
||||
lookupMap.set(epochDay, (lookupMap.get(epochDay) ?? 0) + session.yomitanLookupCount);
|
||||
lookups.set(animeTitle, lookupMap);
|
||||
|
||||
const wordMap = words.get(animeTitle) ?? new Map();
|
||||
wordMap.set(epochDay, (wordMap.get(epochDay) ?? 0) + getTrendSessionWordCount(session));
|
||||
words.set(animeTitle, wordMap);
|
||||
}
|
||||
|
||||
const result: TrendPerAnimePoint[] = [];
|
||||
for (const [animeTitle, dayMap] of lookups) {
|
||||
const wordMap = words.get(animeTitle) ?? new Map();
|
||||
for (const [epochDay, lookupCount] of dayMap) {
|
||||
const wordCount = wordMap.get(epochDay) ?? 0;
|
||||
result.push({
|
||||
epochDay,
|
||||
animeTitle,
|
||||
value: wordCount > 0 ? +((lookupCount / wordCount) * 100).toFixed(1) : 0,
|
||||
});
|
||||
}
|
||||
}
|
||||
return result;
|
||||
}
|
||||
|
||||
function buildCumulativePerAnime(points: TrendPerAnimePoint[]): TrendPerAnimePoint[] {
|
||||
const byAnime = new Map<string, Map<number, number>>();
|
||||
const allDays = new Set<number>();
|
||||
@@ -390,6 +342,88 @@ function buildCumulativePerAnime(points: TrendPerAnimePoint[]): TrendPerAnimePoi
|
||||
return result;
|
||||
}
|
||||
|
||||
function buildLibrarySummary(
|
||||
rollups: ImmersionSessionRollupRow[],
|
||||
sessions: TrendSessionMetricRow[],
|
||||
titlesByVideoId: Map<number, string>,
|
||||
): LibrarySummaryRow[] {
|
||||
type Accum = {
|
||||
watchTimeMin: number;
|
||||
videos: Set<number>;
|
||||
cards: number;
|
||||
words: number;
|
||||
firstWatched: number;
|
||||
lastWatched: number;
|
||||
sessions: number;
|
||||
lookups: number;
|
||||
};
|
||||
|
||||
const byTitle = new Map<string, Accum>();
|
||||
|
||||
const ensure = (title: string): Accum => {
|
||||
const existing = byTitle.get(title);
|
||||
if (existing) return existing;
|
||||
const created: Accum = {
|
||||
watchTimeMin: 0,
|
||||
videos: new Set<number>(),
|
||||
cards: 0,
|
||||
words: 0,
|
||||
firstWatched: Number.POSITIVE_INFINITY,
|
||||
lastWatched: Number.NEGATIVE_INFINITY,
|
||||
sessions: 0,
|
||||
lookups: 0,
|
||||
};
|
||||
byTitle.set(title, created);
|
||||
return created;
|
||||
};
|
||||
|
||||
for (const rollup of rollups) {
|
||||
if (rollup.videoId === null) continue;
|
||||
const title = resolveVideoAnimeTitle(rollup.videoId, titlesByVideoId);
|
||||
const acc = ensure(title);
|
||||
acc.watchTimeMin += rollup.totalActiveMin;
|
||||
acc.cards += rollup.totalCards;
|
||||
acc.words += rollup.totalTokensSeen;
|
||||
acc.videos.add(rollup.videoId);
|
||||
if (rollup.rollupDayOrMonth < acc.firstWatched) {
|
||||
acc.firstWatched = rollup.rollupDayOrMonth;
|
||||
}
|
||||
if (rollup.rollupDayOrMonth > acc.lastWatched) {
|
||||
acc.lastWatched = rollup.rollupDayOrMonth;
|
||||
}
|
||||
}
|
||||
|
||||
for (const session of sessions) {
|
||||
const title = resolveTrendAnimeTitle(session);
|
||||
if (!byTitle.has(title)) continue;
|
||||
const acc = byTitle.get(title)!;
|
||||
acc.sessions += 1;
|
||||
acc.lookups += session.yomitanLookupCount;
|
||||
}
|
||||
|
||||
const rows: LibrarySummaryRow[] = [];
|
||||
for (const [title, acc] of byTitle) {
|
||||
if (!Number.isFinite(acc.firstWatched) || !Number.isFinite(acc.lastWatched)) {
|
||||
continue;
|
||||
}
|
||||
rows.push({
|
||||
title,
|
||||
watchTimeMin: Math.round(acc.watchTimeMin),
|
||||
videos: acc.videos.size,
|
||||
sessions: acc.sessions,
|
||||
cards: acc.cards,
|
||||
words: acc.words,
|
||||
lookups: acc.lookups,
|
||||
lookupsPerHundred: acc.words > 0 ? +((acc.lookups / acc.words) * 100).toFixed(1) : null,
|
||||
firstWatched: acc.firstWatched,
|
||||
lastWatched: acc.lastWatched,
|
||||
});
|
||||
}
|
||||
|
||||
rows.sort((a, b) => b.watchTimeMin - a.watchTimeMin || a.title.localeCompare(b.title));
|
||||
return rows;
|
||||
}
|
||||
|
||||
function getVideoAnimeTitleMap(
|
||||
db: DatabaseSync,
|
||||
videoIds: Array<number | null>,
|
||||
@@ -662,8 +696,6 @@ export function getTrendsDashboard(
|
||||
titlesByVideoId,
|
||||
(rollup) => rollup.totalTokensSeen,
|
||||
),
|
||||
lookups: buildPerAnimeFromSessions(sessions, (session) => session.yomitanLookupCount),
|
||||
lookupsPerHundred: buildLookupsPerHundredPerAnime(sessions),
|
||||
};
|
||||
|
||||
return {
|
||||
@@ -690,7 +722,6 @@ export function getTrendsDashboard(
|
||||
ratios: {
|
||||
lookupsPerHundred: buildLookupsPerHundredWords(sessions, groupBy),
|
||||
},
|
||||
animePerDay,
|
||||
animeCumulative: {
|
||||
watchTime: buildCumulativePerAnime(animePerDay.watchTime),
|
||||
episodes: buildCumulativePerAnime(animePerDay.episodes),
|
||||
@@ -701,5 +732,6 @@ export function getTrendsDashboard(
|
||||
watchTimeByDayOfWeek: buildWatchTimeByDayOfWeek(sessions),
|
||||
watchTimeByHour: buildWatchTimeByHour(sessions),
|
||||
},
|
||||
librarySummary: buildLibrarySummary(dailyRollups, sessions, titlesByVideoId),
|
||||
};
|
||||
}
|
||||
|
||||
@@ -263,6 +263,372 @@ test('ensureSchema adds youtube metadata table to existing schema version 15 dat
|
||||
}
|
||||
});
|
||||
|
||||
test('ensureSchema migrates session event timestamps to text and repairs libsql-truncated wall-clock values', () => {
|
||||
const dbPath = makeDbPath();
|
||||
const db = new Database(dbPath);
|
||||
|
||||
try {
|
||||
db.exec(`
|
||||
CREATE TABLE imm_schema_version (
|
||||
schema_version INTEGER PRIMARY KEY,
|
||||
applied_at_ms INTEGER NOT NULL
|
||||
);
|
||||
INSERT INTO imm_schema_version(schema_version, applied_at_ms) VALUES (16, 1000);
|
||||
|
||||
CREATE TABLE imm_rollup_state(
|
||||
state_key TEXT PRIMARY KEY,
|
||||
state_value INTEGER NOT NULL
|
||||
);
|
||||
INSERT INTO imm_rollup_state(state_key, state_value) VALUES ('last_rollup_sample_ms', 0);
|
||||
|
||||
CREATE TABLE imm_anime(
|
||||
anime_id INTEGER PRIMARY KEY AUTOINCREMENT,
|
||||
normalized_title_key TEXT NOT NULL UNIQUE,
|
||||
canonical_title TEXT NOT NULL,
|
||||
anilist_id INTEGER UNIQUE,
|
||||
title_romaji TEXT,
|
||||
title_english TEXT,
|
||||
title_native TEXT,
|
||||
episodes_total INTEGER,
|
||||
description TEXT,
|
||||
metadata_json TEXT,
|
||||
CREATED_DATE TEXT,
|
||||
LAST_UPDATE_DATE TEXT
|
||||
);
|
||||
|
||||
CREATE TABLE imm_videos(
|
||||
video_id INTEGER PRIMARY KEY AUTOINCREMENT,
|
||||
video_key TEXT NOT NULL UNIQUE,
|
||||
anime_id INTEGER,
|
||||
canonical_title TEXT NOT NULL,
|
||||
source_type INTEGER NOT NULL,
|
||||
source_path TEXT,
|
||||
source_url TEXT,
|
||||
parsed_basename TEXT,
|
||||
parsed_title TEXT,
|
||||
parsed_season INTEGER,
|
||||
parsed_episode INTEGER,
|
||||
parser_source TEXT,
|
||||
parser_confidence REAL,
|
||||
parse_metadata_json TEXT,
|
||||
watched INTEGER NOT NULL DEFAULT 0,
|
||||
duration_ms INTEGER NOT NULL CHECK(duration_ms>=0),
|
||||
file_size_bytes INTEGER CHECK(file_size_bytes>=0),
|
||||
codec_id INTEGER, container_id INTEGER,
|
||||
width_px INTEGER, height_px INTEGER, fps_x100 INTEGER,
|
||||
bitrate_kbps INTEGER, audio_codec_id INTEGER,
|
||||
hash_sha256 TEXT, screenshot_path TEXT,
|
||||
metadata_json TEXT,
|
||||
CREATED_DATE TEXT,
|
||||
LAST_UPDATE_DATE TEXT,
|
||||
FOREIGN KEY(anime_id) REFERENCES imm_anime(anime_id) ON DELETE SET NULL
|
||||
);
|
||||
|
||||
CREATE TABLE imm_sessions(
|
||||
session_id INTEGER PRIMARY KEY AUTOINCREMENT,
|
||||
session_uuid TEXT NOT NULL UNIQUE,
|
||||
video_id INTEGER NOT NULL,
|
||||
started_at_ms TEXT NOT NULL,
|
||||
ended_at_ms TEXT,
|
||||
status INTEGER NOT NULL,
|
||||
locale_id INTEGER,
|
||||
target_lang_id INTEGER,
|
||||
difficulty_tier INTEGER,
|
||||
subtitle_mode INTEGER,
|
||||
ended_media_ms INTEGER,
|
||||
total_watched_ms INTEGER NOT NULL DEFAULT 0,
|
||||
active_watched_ms INTEGER NOT NULL DEFAULT 0,
|
||||
lines_seen INTEGER NOT NULL DEFAULT 0,
|
||||
tokens_seen INTEGER NOT NULL DEFAULT 0,
|
||||
cards_mined INTEGER NOT NULL DEFAULT 0,
|
||||
lookup_count INTEGER NOT NULL DEFAULT 0,
|
||||
lookup_hits INTEGER NOT NULL DEFAULT 0,
|
||||
yomitan_lookup_count INTEGER NOT NULL DEFAULT 0,
|
||||
pause_count INTEGER NOT NULL DEFAULT 0,
|
||||
pause_ms INTEGER NOT NULL DEFAULT 0,
|
||||
seek_forward_count INTEGER NOT NULL DEFAULT 0,
|
||||
seek_backward_count INTEGER NOT NULL DEFAULT 0,
|
||||
media_buffer_events INTEGER NOT NULL DEFAULT 0,
|
||||
CREATED_DATE TEXT,
|
||||
LAST_UPDATE_DATE TEXT,
|
||||
FOREIGN KEY(video_id) REFERENCES imm_videos(video_id)
|
||||
);
|
||||
|
||||
CREATE TABLE imm_session_telemetry(
|
||||
telemetry_id INTEGER PRIMARY KEY AUTOINCREMENT,
|
||||
session_id INTEGER NOT NULL,
|
||||
sample_ms TEXT NOT NULL,
|
||||
total_watched_ms INTEGER NOT NULL DEFAULT 0,
|
||||
active_watched_ms INTEGER NOT NULL DEFAULT 0,
|
||||
lines_seen INTEGER NOT NULL DEFAULT 0,
|
||||
tokens_seen INTEGER NOT NULL DEFAULT 0,
|
||||
cards_mined INTEGER NOT NULL DEFAULT 0,
|
||||
lookup_count INTEGER NOT NULL DEFAULT 0,
|
||||
lookup_hits INTEGER NOT NULL DEFAULT 0,
|
||||
yomitan_lookup_count INTEGER NOT NULL DEFAULT 0,
|
||||
pause_count INTEGER NOT NULL DEFAULT 0,
|
||||
pause_ms INTEGER NOT NULL DEFAULT 0,
|
||||
seek_forward_count INTEGER NOT NULL DEFAULT 0,
|
||||
seek_backward_count INTEGER NOT NULL DEFAULT 0,
|
||||
media_buffer_events INTEGER NOT NULL DEFAULT 0,
|
||||
CREATED_DATE TEXT,
|
||||
LAST_UPDATE_DATE TEXT,
|
||||
FOREIGN KEY(session_id) REFERENCES imm_sessions(session_id) ON DELETE CASCADE
|
||||
);
|
||||
|
||||
CREATE TABLE imm_session_events(
|
||||
event_id INTEGER PRIMARY KEY AUTOINCREMENT,
|
||||
session_id INTEGER NOT NULL,
|
||||
ts_ms INTEGER NOT NULL,
|
||||
event_type INTEGER NOT NULL,
|
||||
line_index INTEGER,
|
||||
segment_start_ms INTEGER,
|
||||
segment_end_ms INTEGER,
|
||||
tokens_delta INTEGER NOT NULL DEFAULT 0,
|
||||
cards_delta INTEGER NOT NULL DEFAULT 0,
|
||||
payload_json TEXT,
|
||||
CREATED_DATE TEXT,
|
||||
LAST_UPDATE_DATE TEXT,
|
||||
FOREIGN KEY(session_id) REFERENCES imm_sessions(session_id) ON DELETE CASCADE
|
||||
);
|
||||
|
||||
CREATE TABLE imm_daily_rollups(
|
||||
rollup_day INTEGER NOT NULL,
|
||||
video_id INTEGER,
|
||||
total_sessions INTEGER NOT NULL DEFAULT 0,
|
||||
total_active_min REAL NOT NULL DEFAULT 0,
|
||||
total_lines_seen INTEGER NOT NULL DEFAULT 0,
|
||||
total_tokens_seen INTEGER NOT NULL DEFAULT 0,
|
||||
total_cards INTEGER NOT NULL DEFAULT 0,
|
||||
cards_per_hour REAL,
|
||||
tokens_per_min REAL,
|
||||
lookup_hit_rate REAL,
|
||||
CREATED_DATE TEXT,
|
||||
LAST_UPDATE_DATE TEXT,
|
||||
PRIMARY KEY (rollup_day, video_id)
|
||||
);
|
||||
|
||||
CREATE TABLE imm_monthly_rollups(
|
||||
rollup_month INTEGER NOT NULL,
|
||||
video_id INTEGER,
|
||||
total_sessions INTEGER NOT NULL DEFAULT 0,
|
||||
total_active_min REAL NOT NULL DEFAULT 0,
|
||||
total_lines_seen INTEGER NOT NULL DEFAULT 0,
|
||||
total_tokens_seen INTEGER NOT NULL DEFAULT 0,
|
||||
total_cards INTEGER NOT NULL DEFAULT 0,
|
||||
cards_per_hour REAL,
|
||||
tokens_per_min REAL,
|
||||
lookup_hit_rate REAL,
|
||||
CREATED_DATE TEXT,
|
||||
LAST_UPDATE_DATE TEXT,
|
||||
PRIMARY KEY (rollup_month, video_id)
|
||||
);
|
||||
|
||||
CREATE TABLE imm_words(
|
||||
id INTEGER PRIMARY KEY AUTOINCREMENT,
|
||||
headword TEXT NOT NULL,
|
||||
word TEXT NOT NULL,
|
||||
reading TEXT NOT NULL,
|
||||
part_of_speech TEXT,
|
||||
pos1 TEXT,
|
||||
pos2 TEXT,
|
||||
pos3 TEXT,
|
||||
first_seen INTEGER NOT NULL,
|
||||
last_seen INTEGER NOT NULL,
|
||||
frequency INTEGER NOT NULL DEFAULT 0,
|
||||
frequency_rank INTEGER,
|
||||
UNIQUE(headword, word, reading)
|
||||
);
|
||||
|
||||
CREATE TABLE imm_kanji(
|
||||
id INTEGER PRIMARY KEY AUTOINCREMENT,
|
||||
kanji TEXT NOT NULL UNIQUE,
|
||||
first_seen INTEGER NOT NULL,
|
||||
last_seen INTEGER NOT NULL,
|
||||
frequency INTEGER NOT NULL DEFAULT 0
|
||||
);
|
||||
|
||||
CREATE TABLE imm_subtitle_lines(
|
||||
line_id INTEGER PRIMARY KEY AUTOINCREMENT,
|
||||
session_id INTEGER NOT NULL,
|
||||
event_id INTEGER,
|
||||
video_id INTEGER NOT NULL,
|
||||
anime_id INTEGER,
|
||||
line_index INTEGER NOT NULL,
|
||||
segment_start_ms INTEGER,
|
||||
segment_end_ms INTEGER,
|
||||
text TEXT NOT NULL,
|
||||
secondary_text TEXT,
|
||||
CREATED_DATE INTEGER,
|
||||
LAST_UPDATE_DATE INTEGER,
|
||||
FOREIGN KEY(session_id) REFERENCES imm_sessions(session_id) ON DELETE CASCADE,
|
||||
FOREIGN KEY(event_id) REFERENCES imm_session_events(event_id) ON DELETE SET NULL,
|
||||
FOREIGN KEY(video_id) REFERENCES imm_videos(video_id) ON DELETE CASCADE,
|
||||
FOREIGN KEY(anime_id) REFERENCES imm_anime(anime_id) ON DELETE SET NULL
|
||||
);
|
||||
|
||||
CREATE TABLE imm_word_line_occurrences(
|
||||
line_id INTEGER NOT NULL,
|
||||
word_id INTEGER NOT NULL,
|
||||
occurrence_count INTEGER NOT NULL,
|
||||
PRIMARY KEY(line_id, word_id),
|
||||
FOREIGN KEY(line_id) REFERENCES imm_subtitle_lines(line_id) ON DELETE CASCADE,
|
||||
FOREIGN KEY(word_id) REFERENCES imm_words(id) ON DELETE CASCADE
|
||||
);
|
||||
|
||||
CREATE TABLE imm_kanji_line_occurrences(
|
||||
line_id INTEGER NOT NULL,
|
||||
kanji_id INTEGER NOT NULL,
|
||||
occurrence_count INTEGER NOT NULL,
|
||||
PRIMARY KEY(line_id, kanji_id),
|
||||
FOREIGN KEY(line_id) REFERENCES imm_subtitle_lines(line_id) ON DELETE CASCADE,
|
||||
FOREIGN KEY(kanji_id) REFERENCES imm_kanji(id) ON DELETE CASCADE
|
||||
);
|
||||
|
||||
CREATE TABLE imm_lifetime_global(
|
||||
global_id INTEGER PRIMARY KEY CHECK(global_id = 1),
|
||||
total_sessions INTEGER NOT NULL DEFAULT 0,
|
||||
total_active_ms INTEGER NOT NULL DEFAULT 0,
|
||||
total_cards INTEGER NOT NULL DEFAULT 0,
|
||||
active_days INTEGER NOT NULL DEFAULT 0,
|
||||
episodes_started INTEGER NOT NULL DEFAULT 0,
|
||||
episodes_completed INTEGER NOT NULL DEFAULT 0,
|
||||
anime_completed INTEGER NOT NULL DEFAULT 0,
|
||||
last_rebuilt_ms TEXT,
|
||||
CREATED_DATE TEXT,
|
||||
LAST_UPDATE_DATE TEXT
|
||||
);
|
||||
|
||||
CREATE TABLE imm_lifetime_anime(
|
||||
anime_id INTEGER PRIMARY KEY,
|
||||
total_sessions INTEGER NOT NULL DEFAULT 0,
|
||||
total_active_ms INTEGER NOT NULL DEFAULT 0,
|
||||
total_cards INTEGER NOT NULL DEFAULT 0,
|
||||
total_lines_seen INTEGER NOT NULL DEFAULT 0,
|
||||
total_tokens_seen INTEGER NOT NULL DEFAULT 0,
|
||||
episodes_started INTEGER NOT NULL DEFAULT 0,
|
||||
episodes_completed INTEGER NOT NULL DEFAULT 0,
|
||||
first_watched_ms TEXT,
|
||||
last_watched_ms TEXT,
|
||||
CREATED_DATE TEXT,
|
||||
LAST_UPDATE_DATE TEXT,
|
||||
FOREIGN KEY(anime_id) REFERENCES imm_anime(anime_id) ON DELETE CASCADE
|
||||
);
|
||||
|
||||
CREATE TABLE imm_lifetime_media(
|
||||
video_id INTEGER PRIMARY KEY,
|
||||
total_sessions INTEGER NOT NULL DEFAULT 0,
|
||||
total_active_ms INTEGER NOT NULL DEFAULT 0,
|
||||
total_cards INTEGER NOT NULL DEFAULT 0,
|
||||
total_lines_seen INTEGER NOT NULL DEFAULT 0,
|
||||
total_tokens_seen INTEGER NOT NULL DEFAULT 0,
|
||||
completed INTEGER NOT NULL DEFAULT 0,
|
||||
first_watched_ms TEXT,
|
||||
last_watched_ms TEXT,
|
||||
CREATED_DATE TEXT,
|
||||
LAST_UPDATE_DATE TEXT,
|
||||
FOREIGN KEY(video_id) REFERENCES imm_videos(video_id) ON DELETE CASCADE
|
||||
);
|
||||
|
||||
CREATE TABLE imm_lifetime_applied_sessions(
|
||||
session_id INTEGER PRIMARY KEY,
|
||||
applied_at_ms TEXT NOT NULL,
|
||||
CREATED_DATE TEXT,
|
||||
LAST_UPDATE_DATE TEXT,
|
||||
FOREIGN KEY(session_id) REFERENCES imm_sessions(session_id) ON DELETE CASCADE
|
||||
);
|
||||
|
||||
CREATE TABLE imm_media_art(
|
||||
video_id INTEGER PRIMARY KEY,
|
||||
anilist_id INTEGER,
|
||||
cover_url TEXT,
|
||||
cover_blob BLOB,
|
||||
cover_blob_hash TEXT,
|
||||
fetched_at_ms TEXT,
|
||||
CREATED_DATE TEXT,
|
||||
LAST_UPDATE_DATE TEXT,
|
||||
FOREIGN KEY(video_id) REFERENCES imm_videos(video_id) ON DELETE CASCADE
|
||||
);
|
||||
|
||||
CREATE TABLE imm_cover_art_blobs(
|
||||
blob_hash TEXT PRIMARY KEY,
|
||||
cover_blob BLOB NOT NULL,
|
||||
CREATED_DATE TEXT,
|
||||
LAST_UPDATE_DATE TEXT
|
||||
);
|
||||
|
||||
CREATE TABLE imm_youtube_videos(
|
||||
video_id INTEGER PRIMARY KEY,
|
||||
youtube_video_id TEXT,
|
||||
video_url TEXT,
|
||||
video_title TEXT,
|
||||
video_thumbnail_url TEXT,
|
||||
channel_id TEXT,
|
||||
channel_name TEXT,
|
||||
channel_url TEXT,
|
||||
channel_thumbnail_url TEXT,
|
||||
uploader_id TEXT,
|
||||
uploader_url TEXT,
|
||||
description TEXT,
|
||||
metadata_json TEXT,
|
||||
fetched_at_ms TEXT NOT NULL,
|
||||
CREATED_DATE TEXT,
|
||||
LAST_UPDATE_DATE TEXT,
|
||||
FOREIGN KEY(video_id) REFERENCES imm_videos(video_id) ON DELETE CASCADE
|
||||
);
|
||||
|
||||
INSERT INTO imm_videos (
|
||||
video_id, video_key, canonical_title, source_type, source_path, source_url, watched, duration_ms,
|
||||
CREATED_DATE, LAST_UPDATE_DATE
|
||||
) VALUES (
|
||||
1, 'local:/tmp/repaired-event.mkv', 'Repaired Event', 1, '/tmp/repaired-event.mkv', NULL, 0, 0, '1000', '1000'
|
||||
);
|
||||
|
||||
INSERT INTO imm_sessions (
|
||||
session_id, session_uuid, video_id, started_at_ms, status, CREATED_DATE, LAST_UPDATE_DATE
|
||||
) VALUES (
|
||||
1, 'session-1', 1, '1775940000000', 1, '1775940000000', '1775940000000'
|
||||
);
|
||||
|
||||
INSERT INTO imm_session_events (
|
||||
event_id, session_id, ts_ms, event_type, line_index, segment_start_ms, segment_end_ms,
|
||||
tokens_delta, cards_delta, payload_json, CREATED_DATE, LAST_UPDATE_DATE
|
||||
) VALUES (
|
||||
1, 1, -2147483648, 4, NULL, NULL, NULL, 0, 1, '{\"noteIds\":[1]}', '1775943304128', '1775943304128'
|
||||
);
|
||||
`);
|
||||
|
||||
ensureSchema(db);
|
||||
|
||||
const column = db.prepare(`PRAGMA table_info(imm_session_events)`).all() as Array<{
|
||||
name: string;
|
||||
type: string;
|
||||
}>;
|
||||
assert.equal(column.find((entry) => entry.name === 'ts_ms')?.type, 'TEXT');
|
||||
|
||||
const row = db
|
||||
.prepare(
|
||||
`
|
||||
SELECT ts_ms AS tsMs, typeof(ts_ms) AS tsType, CREATED_DATE AS createdDate
|
||||
FROM imm_session_events
|
||||
WHERE event_id = 1
|
||||
`,
|
||||
)
|
||||
.get() as {
|
||||
tsMs: string;
|
||||
tsType: string;
|
||||
createdDate: string;
|
||||
};
|
||||
|
||||
assert.equal(row.tsType, 'text');
|
||||
assert.equal(row.tsMs, '1775943304128');
|
||||
assert.equal(row.createdDate, '1775943304128');
|
||||
} finally {
|
||||
db.close();
|
||||
cleanupDbPath(dbPath);
|
||||
}
|
||||
});
|
||||
|
||||
test('ensureSchema creates large-history performance indexes', () => {
|
||||
const dbPath = makeDbPath();
|
||||
const db = new Database(dbPath);
|
||||
|
||||
@@ -170,6 +170,16 @@ function hasColumn(db: DatabaseSync, tableName: string, columnName: string): boo
|
||||
.some((row: unknown) => (row as { name: string }).name === columnName);
|
||||
}
|
||||
|
||||
function getColumnType(db: DatabaseSync, tableName: string, columnName: string): string | null {
|
||||
const row = (
|
||||
db.prepare(`PRAGMA table_info(${tableName})`).all() as Array<{
|
||||
name: string;
|
||||
type: string;
|
||||
}>
|
||||
).find((entry) => entry.name === columnName);
|
||||
return row?.type ?? null;
|
||||
}
|
||||
|
||||
function addColumnIfMissing(
|
||||
db: DatabaseSync,
|
||||
tableName: string,
|
||||
@@ -187,6 +197,92 @@ function dropColumnIfExists(db: DatabaseSync, tableName: string, columnName: str
|
||||
}
|
||||
}
|
||||
|
||||
function migrateSessionEventTimestampsToText(db: DatabaseSync): void {
|
||||
if (getColumnType(db, 'imm_session_events', 'ts_ms') === 'TEXT') {
|
||||
return;
|
||||
}
|
||||
|
||||
const lineIndexExpr = hasColumn(db, 'imm_session_events', 'line_index') ? 'line_index' : 'NULL';
|
||||
const segmentStartExpr = hasColumn(db, 'imm_session_events', 'segment_start_ms')
|
||||
? 'segment_start_ms'
|
||||
: 'NULL';
|
||||
const segmentEndExpr = hasColumn(db, 'imm_session_events', 'segment_end_ms')
|
||||
? 'segment_end_ms'
|
||||
: 'NULL';
|
||||
const tokensDeltaExpr = hasColumn(db, 'imm_session_events', 'tokens_delta')
|
||||
? 'tokens_delta'
|
||||
: '0';
|
||||
const cardsDeltaExpr = hasColumn(db, 'imm_session_events', 'cards_delta') ? 'cards_delta' : '0';
|
||||
const payloadExpr = hasColumn(db, 'imm_session_events', 'payload_json') ? 'payload_json' : 'NULL';
|
||||
const createdDateExpr = hasColumn(db, 'imm_session_events', 'CREATED_DATE')
|
||||
? 'CAST(CREATED_DATE AS TEXT)'
|
||||
: 'NULL';
|
||||
const lastUpdateExpr = hasColumn(db, 'imm_session_events', 'LAST_UPDATE_DATE')
|
||||
? 'CAST(LAST_UPDATE_DATE AS TEXT)'
|
||||
: 'NULL';
|
||||
const repairedTimestampExpr =
|
||||
hasColumn(db, 'imm_session_events', 'CREATED_DATE') ||
|
||||
hasColumn(db, 'imm_session_events', 'LAST_UPDATE_DATE')
|
||||
? `CASE
|
||||
WHEN ts_ms < 0 AND COALESCE(CREATED_DATE, LAST_UPDATE_DATE) IS NOT NULL
|
||||
THEN CAST(COALESCE(CREATED_DATE, LAST_UPDATE_DATE) AS TEXT)
|
||||
ELSE CAST(ts_ms AS TEXT)
|
||||
END`
|
||||
: 'CAST(ts_ms AS TEXT)';
|
||||
|
||||
db.exec('PRAGMA foreign_keys = OFF');
|
||||
db.exec(`
|
||||
CREATE TABLE imm_session_events_new(
|
||||
event_id INTEGER PRIMARY KEY AUTOINCREMENT,
|
||||
session_id INTEGER NOT NULL,
|
||||
ts_ms TEXT NOT NULL,
|
||||
event_type INTEGER NOT NULL,
|
||||
line_index INTEGER,
|
||||
segment_start_ms INTEGER,
|
||||
segment_end_ms INTEGER,
|
||||
tokens_delta INTEGER NOT NULL DEFAULT 0,
|
||||
cards_delta INTEGER NOT NULL DEFAULT 0,
|
||||
payload_json TEXT,
|
||||
CREATED_DATE TEXT,
|
||||
LAST_UPDATE_DATE TEXT,
|
||||
FOREIGN KEY(session_id) REFERENCES imm_sessions(session_id) ON DELETE CASCADE
|
||||
);
|
||||
`);
|
||||
db.exec(`
|
||||
INSERT INTO imm_session_events_new(
|
||||
event_id,
|
||||
session_id,
|
||||
ts_ms,
|
||||
event_type,
|
||||
line_index,
|
||||
segment_start_ms,
|
||||
segment_end_ms,
|
||||
tokens_delta,
|
||||
cards_delta,
|
||||
payload_json,
|
||||
CREATED_DATE,
|
||||
LAST_UPDATE_DATE
|
||||
)
|
||||
SELECT
|
||||
event_id,
|
||||
session_id,
|
||||
${repairedTimestampExpr},
|
||||
event_type,
|
||||
${lineIndexExpr},
|
||||
${segmentStartExpr},
|
||||
${segmentEndExpr},
|
||||
${tokensDeltaExpr},
|
||||
${cardsDeltaExpr},
|
||||
${payloadExpr},
|
||||
${createdDateExpr},
|
||||
${lastUpdateExpr}
|
||||
FROM imm_session_events
|
||||
`);
|
||||
db.exec('DROP TABLE imm_session_events');
|
||||
db.exec('ALTER TABLE imm_session_events_new RENAME TO imm_session_events');
|
||||
db.exec('PRAGMA foreign_keys = ON');
|
||||
}
|
||||
|
||||
export function applyPragmas(db: DatabaseSync): void {
|
||||
db.exec('PRAGMA journal_mode = WAL');
|
||||
db.exec('PRAGMA synchronous = NORMAL');
|
||||
@@ -685,7 +781,7 @@ export function ensureSchema(db: DatabaseSync): void {
|
||||
CREATE TABLE IF NOT EXISTS imm_session_events(
|
||||
event_id INTEGER PRIMARY KEY AUTOINCREMENT,
|
||||
session_id INTEGER NOT NULL,
|
||||
ts_ms INTEGER NOT NULL,
|
||||
ts_ms TEXT NOT NULL,
|
||||
event_type INTEGER NOT NULL,
|
||||
line_index INTEGER,
|
||||
segment_start_ms INTEGER,
|
||||
@@ -1122,6 +1218,8 @@ export function ensureSchema(db: DatabaseSync): void {
|
||||
addColumnIfMissing(db, 'imm_sessions', 'ended_media_ms', 'INTEGER');
|
||||
}
|
||||
|
||||
migrateSessionEventTimestampsToText(db);
|
||||
|
||||
ensureLifetimeSummaryTables(db);
|
||||
|
||||
db.exec(`
|
||||
@@ -1420,7 +1518,8 @@ export function executeQueuedWrite(write: QueuedWrite, stmts: TrackerPreparedSta
|
||||
) {
|
||||
throw new Error('Incomplete telemetry write');
|
||||
}
|
||||
const telemetrySampleMs = toDbTimestamp(write.sampleMs ?? Number(currentMs));
|
||||
const telemetrySampleMs =
|
||||
write.sampleMs === undefined ? currentMs : toDbTimestamp(write.sampleMs);
|
||||
stmts.telemetryInsertStmt.run(
|
||||
write.sessionId,
|
||||
telemetrySampleMs,
|
||||
@@ -1495,7 +1594,7 @@ export function executeQueuedWrite(write: QueuedWrite, stmts: TrackerPreparedSta
|
||||
|
||||
stmts.eventInsertStmt.run(
|
||||
write.sessionId,
|
||||
toDbTimestamp(write.sampleMs ?? Number(currentMs)),
|
||||
write.sampleMs === undefined ? currentMs : toDbTimestamp(write.sampleMs),
|
||||
write.eventType ?? 0,
|
||||
write.lineIndex ?? null,
|
||||
write.segmentStartMs ?? null,
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
export const SCHEMA_VERSION = 16;
|
||||
export const SCHEMA_VERSION = 17;
|
||||
export const DEFAULT_QUEUE_CAP = 1_000;
|
||||
export const DEFAULT_BATCH_SIZE = 25;
|
||||
export const DEFAULT_FLUSH_INTERVAL_MS = 500;
|
||||
|
||||
@@ -886,17 +886,47 @@ test('registerIpcHandlers validates dispatchSessionAction payloads', async () =>
|
||||
await dispatchHandler!({}, { actionId: 'unknown-action' });
|
||||
}, /Invalid session action payload/);
|
||||
|
||||
await dispatchHandler!({}, {
|
||||
actionId: 'copySubtitleMultiple',
|
||||
payload: { count: 3 },
|
||||
});
|
||||
await dispatchHandler!({}, {
|
||||
actionId: 'cycleRuntimeOption',
|
||||
payload: {
|
||||
runtimeOptionId: 'anki.autoUpdateNewCards',
|
||||
direction: -1,
|
||||
await dispatchHandler!(
|
||||
{},
|
||||
{
|
||||
actionId: 'copySubtitleMultiple',
|
||||
payload: { count: 3 },
|
||||
},
|
||||
});
|
||||
);
|
||||
await dispatchHandler!(
|
||||
{},
|
||||
{
|
||||
actionId: 'cycleRuntimeOption',
|
||||
payload: {
|
||||
runtimeOptionId: 'anki.autoUpdateNewCards',
|
||||
direction: -1,
|
||||
},
|
||||
},
|
||||
);
|
||||
await dispatchHandler!(
|
||||
{},
|
||||
{
|
||||
actionId: 'toggleSubtitleSidebar',
|
||||
},
|
||||
);
|
||||
await dispatchHandler!(
|
||||
{},
|
||||
{
|
||||
actionId: 'openSessionHelp',
|
||||
},
|
||||
);
|
||||
await dispatchHandler!(
|
||||
{},
|
||||
{
|
||||
actionId: 'openControllerSelect',
|
||||
},
|
||||
);
|
||||
await dispatchHandler!(
|
||||
{},
|
||||
{
|
||||
actionId: 'openControllerDebug',
|
||||
},
|
||||
);
|
||||
|
||||
assert.deepEqual(dispatched, [
|
||||
{
|
||||
@@ -910,6 +940,18 @@ test('registerIpcHandlers validates dispatchSessionAction payloads', async () =>
|
||||
direction: -1,
|
||||
},
|
||||
},
|
||||
{
|
||||
actionId: 'toggleSubtitleSidebar',
|
||||
},
|
||||
{
|
||||
actionId: 'openSessionHelp',
|
||||
},
|
||||
{
|
||||
actionId: 'openControllerSelect',
|
||||
},
|
||||
{
|
||||
actionId: 'openControllerDebug',
|
||||
},
|
||||
]);
|
||||
});
|
||||
|
||||
|
||||
@@ -45,11 +45,7 @@ test('collectDroppedVideoPaths parses text/uri-list entries and de-duplicates',
|
||||
|
||||
test('collectDroppedSubtitlePaths keeps supported dropped subtitle paths in order', () => {
|
||||
const transfer = makeTransfer({
|
||||
files: [
|
||||
{ path: '/subs/ep02.ass' },
|
||||
{ path: '/subs/readme.txt' },
|
||||
{ path: '/subs/ep03.SRT' },
|
||||
],
|
||||
files: [{ path: '/subs/ep02.ass' }, { path: '/subs/readme.txt' }, { path: '/subs/ep03.SRT' }],
|
||||
});
|
||||
|
||||
const result = collectDroppedSubtitlePaths(transfer);
|
||||
|
||||
@@ -1,6 +1,7 @@
|
||||
import assert from 'node:assert/strict';
|
||||
import test from 'node:test';
|
||||
|
||||
import { OVERLAY_WINDOW_CONTENT_READY_FLAG } from './overlay-window-flags';
|
||||
import { setVisibleOverlayVisible, updateVisibleOverlayVisibility } from './overlay-visibility';
|
||||
|
||||
type WindowTrackerStub = {
|
||||
@@ -15,7 +16,9 @@ function createMainWindowRecorder() {
|
||||
let visible = false;
|
||||
let focused = false;
|
||||
let opacity = 1;
|
||||
let contentReady = true;
|
||||
const window = {
|
||||
webContents: {},
|
||||
isDestroyed: () => false,
|
||||
isVisible: () => visible,
|
||||
isFocused: () => focused,
|
||||
@@ -50,11 +53,24 @@ function createMainWindowRecorder() {
|
||||
calls.push('move-top');
|
||||
},
|
||||
};
|
||||
(
|
||||
window as {
|
||||
[OVERLAY_WINDOW_CONTENT_READY_FLAG]?: boolean;
|
||||
}
|
||||
)[OVERLAY_WINDOW_CONTENT_READY_FLAG] = contentReady;
|
||||
|
||||
return {
|
||||
window,
|
||||
calls,
|
||||
getOpacity: () => opacity,
|
||||
setContentReady: (nextContentReady: boolean) => {
|
||||
contentReady = nextContentReady;
|
||||
(
|
||||
window as {
|
||||
[OVERLAY_WINDOW_CONTENT_READY_FLAG]?: boolean;
|
||||
}
|
||||
)[OVERLAY_WINDOW_CONTENT_READY_FLAG] = contentReady;
|
||||
},
|
||||
setFocused: (nextFocused: boolean) => {
|
||||
focused = nextFocused;
|
||||
},
|
||||
@@ -285,6 +301,54 @@ test('Windows visible overlay restores opacity after the deferred reveal delay',
|
||||
assert.ok(calls.includes('opacity:1'));
|
||||
});
|
||||
|
||||
test('Windows visible overlay waits for content-ready before first reveal', () => {
|
||||
const { window, calls, setContentReady } = createMainWindowRecorder();
|
||||
const tracker: WindowTrackerStub = {
|
||||
isTracking: () => true,
|
||||
getGeometry: () => ({ x: 0, y: 0, width: 1280, height: 720 }),
|
||||
};
|
||||
setContentReady(false);
|
||||
|
||||
const run = () =>
|
||||
updateVisibleOverlayVisibility({
|
||||
visibleOverlayVisible: true,
|
||||
mainWindow: window as never,
|
||||
windowTracker: tracker as never,
|
||||
trackerNotReadyWarningShown: false,
|
||||
setTrackerNotReadyWarningShown: () => {},
|
||||
updateVisibleOverlayBounds: () => {
|
||||
calls.push('update-bounds');
|
||||
},
|
||||
ensureOverlayWindowLevel: () => {
|
||||
calls.push('ensure-level');
|
||||
},
|
||||
syncWindowsOverlayToMpvZOrder: () => {
|
||||
calls.push('sync-windows-z-order');
|
||||
},
|
||||
syncPrimaryOverlayWindowLayer: () => {
|
||||
calls.push('sync-layer');
|
||||
},
|
||||
enforceOverlayLayerOrder: () => {
|
||||
calls.push('enforce-order');
|
||||
},
|
||||
syncOverlayShortcuts: () => {
|
||||
calls.push('sync-shortcuts');
|
||||
},
|
||||
isMacOSPlatform: false,
|
||||
isWindowsPlatform: true,
|
||||
} as never);
|
||||
|
||||
run();
|
||||
|
||||
assert.ok(!calls.includes('show-inactive'));
|
||||
assert.ok(!calls.includes('show'));
|
||||
|
||||
setContentReady(true);
|
||||
run();
|
||||
|
||||
assert.ok(calls.includes('show-inactive'));
|
||||
});
|
||||
|
||||
test('tracked Windows overlay refresh rebinds while already visible', () => {
|
||||
const { window, calls } = createMainWindowRecorder();
|
||||
const tracker: WindowTrackerStub = {
|
||||
|
||||
@@ -158,18 +158,24 @@ export function updateVisibleOverlayVisibility(args: {
|
||||
setOverlayWindowOpacity(mainWindow, 0);
|
||||
mainWindow.showInactive();
|
||||
mainWindow.setIgnoreMouseEvents(true, { forward: true });
|
||||
scheduleWindowsOverlayReveal(mainWindow, shouldBindTrackedWindowsOverlay
|
||||
? (window) => args.syncWindowsOverlayToMpvZOrder?.(window)
|
||||
: undefined);
|
||||
scheduleWindowsOverlayReveal(
|
||||
mainWindow,
|
||||
shouldBindTrackedWindowsOverlay
|
||||
? (window) => args.syncWindowsOverlayToMpvZOrder?.(window)
|
||||
: undefined,
|
||||
);
|
||||
} else {
|
||||
if (args.isWindowsPlatform) {
|
||||
setOverlayWindowOpacity(mainWindow, 0);
|
||||
}
|
||||
mainWindow.show();
|
||||
if (args.isWindowsPlatform) {
|
||||
scheduleWindowsOverlayReveal(mainWindow, shouldBindTrackedWindowsOverlay
|
||||
? (window) => args.syncWindowsOverlayToMpvZOrder?.(window)
|
||||
: undefined);
|
||||
scheduleWindowsOverlayReveal(
|
||||
mainWindow,
|
||||
shouldBindTrackedWindowsOverlay
|
||||
? (window) => args.syncWindowsOverlayToMpvZOrder?.(window)
|
||||
: undefined,
|
||||
);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -34,6 +34,10 @@ function resolveCount(count: number | undefined): number {
|
||||
return Math.min(9, Math.max(1, normalized));
|
||||
}
|
||||
|
||||
function assertUnreachableSessionAction(actionId: never): never {
|
||||
throw new Error(`Unhandled session action: ${String(actionId)}`);
|
||||
}
|
||||
|
||||
export async function dispatchSessionAction(
|
||||
request: SessionActionDispatchRequest,
|
||||
deps: SessionActionExecutorDeps,
|
||||
@@ -121,5 +125,7 @@ export async function dispatchSessionAction(
|
||||
}
|
||||
return;
|
||||
}
|
||||
default:
|
||||
return assertUnreachableSessionAction(request.actionId);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -54,9 +54,7 @@ test('compileSessionBindings merges shortcuts and keybindings into one canonical
|
||||
code: binding.key.code,
|
||||
modifiers: binding.key.modifiers,
|
||||
target:
|
||||
binding.actionType === 'session-action'
|
||||
? binding.actionId
|
||||
: binding.command.join(' '),
|
||||
binding.actionType === 'session-action' ? binding.actionId : binding.command.join(' '),
|
||||
})),
|
||||
[
|
||||
{
|
||||
@@ -191,9 +189,10 @@ test('compileSessionBindings omits disabled bindings', () => {
|
||||
});
|
||||
|
||||
assert.equal(result.warnings.length, 0);
|
||||
assert.deepEqual(result.bindings.map((binding) => binding.sourcePath), [
|
||||
'shortcuts.toggleVisibleOverlayGlobal',
|
||||
]);
|
||||
assert.deepEqual(
|
||||
result.bindings.map((binding) => binding.sourcePath),
|
||||
['shortcuts.toggleVisibleOverlayGlobal'],
|
||||
);
|
||||
});
|
||||
|
||||
test('compileSessionBindings warns on unsupported shortcut and keybinding syntax', () => {
|
||||
@@ -222,12 +221,47 @@ test('compileSessionBindings rejects malformed command arrays', () => {
|
||||
platform: 'linux',
|
||||
});
|
||||
|
||||
assert.deepEqual(result.bindings.map((binding) => binding.sourcePath), ['keybindings[0].key']);
|
||||
assert.deepEqual(
|
||||
result.bindings.map((binding) => binding.sourcePath),
|
||||
['keybindings[0].key'],
|
||||
);
|
||||
assert.equal(result.bindings[0]?.actionType, 'mpv-command');
|
||||
assert.deepEqual(result.bindings[0]?.command, ['show-text', 3000]);
|
||||
assert.deepEqual(result.warnings.map((warning) => `${warning.kind}:${warning.path}`), [
|
||||
'unsupported:keybindings[1].key',
|
||||
]);
|
||||
assert.deepEqual(
|
||||
result.warnings.map((warning) => `${warning.kind}:${warning.path}`),
|
||||
['unsupported:keybindings[1].command'],
|
||||
);
|
||||
});
|
||||
|
||||
test('compileSessionBindings rejects non-string command heads and extra args on special commands', () => {
|
||||
const result = compileSessionBindings({
|
||||
shortcuts: createShortcuts(),
|
||||
keybindings: [
|
||||
createKeybinding('Ctrl+J', [42] as never),
|
||||
createKeybinding('Ctrl+K', [SPECIAL_COMMANDS.JIMAKU_OPEN, 'extra'] as never),
|
||||
],
|
||||
platform: 'linux',
|
||||
});
|
||||
|
||||
assert.deepEqual(result.bindings, []);
|
||||
assert.deepEqual(
|
||||
result.warnings.map((warning) => `${warning.kind}:${warning.path}`),
|
||||
['unsupported:keybindings[0].command', 'unsupported:keybindings[1].command'],
|
||||
);
|
||||
});
|
||||
|
||||
test('compileSessionBindings points unsupported command warnings at the command field', () => {
|
||||
const result = compileSessionBindings({
|
||||
shortcuts: createShortcuts(),
|
||||
keybindings: [createKeybinding('Ctrl+K', [SPECIAL_COMMANDS.JIMAKU_OPEN, 'extra'] as never)],
|
||||
platform: 'linux',
|
||||
});
|
||||
|
||||
assert.deepEqual(result.bindings, []);
|
||||
assert.deepEqual(
|
||||
result.warnings.map((warning) => `${warning.kind}:${warning.path}`),
|
||||
['unsupported:keybindings[0].command'],
|
||||
);
|
||||
});
|
||||
|
||||
test('compileSessionBindings warns on deprecated toggleVisibleOverlayGlobal config', () => {
|
||||
|
||||
@@ -268,40 +268,49 @@ function resolveCommandBinding(
|
||||
|
||||
const first = command[0];
|
||||
if (typeof first !== 'string') {
|
||||
return {
|
||||
actionType: 'mpv-command',
|
||||
command,
|
||||
};
|
||||
return null;
|
||||
}
|
||||
|
||||
if (first === SPECIAL_COMMANDS.SUBSYNC_TRIGGER) {
|
||||
if (command.length !== 1) return null;
|
||||
return { actionType: 'session-action', actionId: 'triggerSubsync' };
|
||||
}
|
||||
if (first === SPECIAL_COMMANDS.RUNTIME_OPTIONS_OPEN) {
|
||||
if (command.length !== 1) return null;
|
||||
return { actionType: 'session-action', actionId: 'openRuntimeOptions' };
|
||||
}
|
||||
if (first === SPECIAL_COMMANDS.JIMAKU_OPEN) {
|
||||
if (command.length !== 1) return null;
|
||||
return { actionType: 'session-action', actionId: 'openJimaku' };
|
||||
}
|
||||
if (first === SPECIAL_COMMANDS.YOUTUBE_PICKER_OPEN) {
|
||||
if (command.length !== 1) return null;
|
||||
return { actionType: 'session-action', actionId: 'openYoutubePicker' };
|
||||
}
|
||||
if (first === SPECIAL_COMMANDS.PLAYLIST_BROWSER_OPEN) {
|
||||
if (command.length !== 1) return null;
|
||||
return { actionType: 'session-action', actionId: 'openPlaylistBrowser' };
|
||||
}
|
||||
if (first === SPECIAL_COMMANDS.REPLAY_SUBTITLE) {
|
||||
if (command.length !== 1) return null;
|
||||
return { actionType: 'session-action', actionId: 'replayCurrentSubtitle' };
|
||||
}
|
||||
if (first === SPECIAL_COMMANDS.PLAY_NEXT_SUBTITLE) {
|
||||
if (command.length !== 1) return null;
|
||||
return { actionType: 'session-action', actionId: 'playNextSubtitle' };
|
||||
}
|
||||
if (first === SPECIAL_COMMANDS.SHIFT_SUB_DELAY_TO_PREVIOUS_SUBTITLE_START) {
|
||||
if (command.length !== 1) return null;
|
||||
return { actionType: 'session-action', actionId: 'shiftSubDelayPrevLine' };
|
||||
}
|
||||
if (first === SPECIAL_COMMANDS.SHIFT_SUB_DELAY_TO_NEXT_SUBTITLE_START) {
|
||||
if (command.length !== 1) return null;
|
||||
return { actionType: 'session-action', actionId: 'shiftSubDelayNextLine' };
|
||||
}
|
||||
if (first.startsWith(SPECIAL_COMMANDS.RUNTIME_OPTION_CYCLE_PREFIX)) {
|
||||
if (command.length !== 1) {
|
||||
return null;
|
||||
}
|
||||
const parts = first.split(':');
|
||||
if (parts.length !== 3) {
|
||||
return null;
|
||||
@@ -333,9 +342,7 @@ function getBindingFingerprint(binding: CompiledSessionBinding): string {
|
||||
return `session:${binding.actionId}:${JSON.stringify(binding.payload ?? null)}`;
|
||||
}
|
||||
|
||||
export function compileSessionBindings(
|
||||
input: CompileSessionBindingsInput,
|
||||
): {
|
||||
export function compileSessionBindings(input: CompileSessionBindingsInput): {
|
||||
bindings: CompiledSessionBinding[];
|
||||
warnings: SessionBindingWarning[];
|
||||
} {
|
||||
@@ -427,7 +434,7 @@ export function compileSessionBindings(
|
||||
if (!resolved) {
|
||||
warnings.push({
|
||||
kind: 'unsupported',
|
||||
path: `keybindings[${index}].key`,
|
||||
path: `keybindings[${index}].command`,
|
||||
value: binding.command,
|
||||
message: 'Unsupported keybinding command syntax.',
|
||||
});
|
||||
|
||||
@@ -311,7 +311,6 @@ export async function runAppReadyRuntime(deps: AppReadyRuntimeDeps): Promise<voi
|
||||
|
||||
deps.createSubtitleTimingTracker();
|
||||
if (deps.createImmersionTracker) {
|
||||
deps.createImmersionTracker();
|
||||
deps.log('Runtime ready: immersion tracker startup requested.');
|
||||
} else {
|
||||
deps.log('Runtime ready: immersion tracker dependency is missing.');
|
||||
|
||||
@@ -30,8 +30,10 @@ function parseIntQuery(raw: string | undefined, fallback: number, maxLimit?: num
|
||||
return maxLimit === undefined ? parsed : Math.min(parsed, maxLimit);
|
||||
}
|
||||
|
||||
function parseTrendRange(raw: string | undefined): '7d' | '30d' | '90d' | 'all' {
|
||||
return raw === '7d' || raw === '30d' || raw === '90d' || raw === 'all' ? raw : '30d';
|
||||
function parseTrendRange(raw: string | undefined): '7d' | '30d' | '90d' | '365d' | 'all' {
|
||||
return raw === '7d' || raw === '30d' || raw === '90d' || raw === '365d' || raw === 'all'
|
||||
? raw
|
||||
: '30d';
|
||||
}
|
||||
|
||||
function parseTrendGroupBy(raw: string | undefined): 'day' | 'month' {
|
||||
|
||||
67
src/main.ts
67
src/main.ts
@@ -415,7 +415,10 @@ import { createAnilistRateLimiter } from './core/services/anilist/rate-limiter';
|
||||
import { createJellyfinTokenStore } from './core/services/jellyfin-token-store';
|
||||
import { applyRuntimeOptionResultRuntime } from './core/services/runtime-options-ipc';
|
||||
import { createAnilistTokenStore } from './core/services/anilist/anilist-token-store';
|
||||
import { buildPluginSessionBindingsArtifact, compileSessionBindings } from './core/services/session-bindings';
|
||||
import {
|
||||
buildPluginSessionBindingsArtifact,
|
||||
compileSessionBindings,
|
||||
} from './core/services/session-bindings';
|
||||
import { dispatchSessionAction as dispatchSessionActionCore } from './core/services/session-actions';
|
||||
import { createBuildOverlayShortcutsRuntimeMainDepsHandler } from './main/runtime/domains/shortcuts';
|
||||
import { createMainRuntimeRegistry } from './main/runtime/registry';
|
||||
@@ -1544,8 +1547,8 @@ const buildConfigHotReloadAppliedMainDepsHandler = createBuildConfigHotReloadApp
|
||||
setKeybindings: (keybindings) => {
|
||||
appState.keybindings = keybindings;
|
||||
},
|
||||
setSessionBindings: (sessionBindings) => {
|
||||
persistSessionBindings(sessionBindings);
|
||||
setSessionBindings: (sessionBindings, sessionBindingWarnings) => {
|
||||
persistSessionBindings(sessionBindings, sessionBindingWarnings);
|
||||
},
|
||||
refreshGlobalAndOverlayShortcuts: () => {
|
||||
refreshGlobalAndOverlayShortcuts();
|
||||
@@ -1933,9 +1936,7 @@ function getWindowsNativeWindowHandle(window: BrowserWindow): string {
|
||||
|
||||
function getWindowsNativeWindowHandleNumber(window: BrowserWindow): number {
|
||||
const handle = window.getNativeWindowHandle();
|
||||
return handle.length >= 8
|
||||
? Number(handle.readBigUInt64LE(0))
|
||||
: handle.readUInt32LE(0);
|
||||
return handle.length >= 8 ? Number(handle.readBigUInt64LE(0)) : handle.readUInt32LE(0);
|
||||
}
|
||||
|
||||
function resolveWindowsOverlayBindTargetHandle(targetMpvSocketPath?: string | null): number | null {
|
||||
@@ -1944,7 +1945,15 @@ function resolveWindowsOverlayBindTargetHandle(targetMpvSocketPath?: string | nu
|
||||
}
|
||||
|
||||
try {
|
||||
void targetMpvSocketPath;
|
||||
if (targetMpvSocketPath) {
|
||||
const windowTracker = appState.windowTracker as {
|
||||
getTargetWindowHandle?: () => number | null;
|
||||
} | null;
|
||||
const trackedHandle = windowTracker?.getTargetWindowHandle?.();
|
||||
if (typeof trackedHandle === 'number' && Number.isFinite(trackedHandle)) {
|
||||
return trackedHandle;
|
||||
}
|
||||
}
|
||||
return findWindowsMpvTargetWindowHandle();
|
||||
} catch {
|
||||
return null;
|
||||
@@ -2245,14 +2254,16 @@ function openOverlayHostedModalWithOsd(
|
||||
unavailableMessage: string,
|
||||
failureLogMessage: string,
|
||||
): void {
|
||||
void openModal(createOverlayHostedModalOpenDeps()).then((opened) => {
|
||||
if (!opened) {
|
||||
void openModal(createOverlayHostedModalOpenDeps())
|
||||
.then((opened) => {
|
||||
if (!opened) {
|
||||
showMpvOsd(unavailableMessage);
|
||||
}
|
||||
})
|
||||
.catch((error) => {
|
||||
logger.error(failureLogMessage, error);
|
||||
showMpvOsd(unavailableMessage);
|
||||
}
|
||||
}).catch((error) => {
|
||||
logger.error(failureLogMessage, error);
|
||||
showMpvOsd(unavailableMessage);
|
||||
});
|
||||
});
|
||||
}
|
||||
|
||||
function openRuntimeOptionsPalette(): void {
|
||||
@@ -3621,7 +3632,7 @@ function ensureOverlayStartupPrereqs(): void {
|
||||
if (appState.keybindings.length === 0) {
|
||||
appState.keybindings = resolveKeybindings(getResolvedConfig(), DEFAULT_KEYBINDINGS);
|
||||
refreshCurrentSessionBindings();
|
||||
} else if (appState.sessionBindings.length === 0) {
|
||||
} else if (!appState.sessionBindingsInitialized) {
|
||||
refreshCurrentSessionBindings();
|
||||
}
|
||||
if (!appState.mpvClient) {
|
||||
@@ -4244,15 +4255,14 @@ function persistSessionBindings(
|
||||
bindings: CompiledSessionBinding[],
|
||||
warnings: ReturnType<typeof compileSessionBindings>['warnings'] = [],
|
||||
): void {
|
||||
const artifact = buildPluginSessionBindingsArtifact({
|
||||
bindings,
|
||||
warnings,
|
||||
numericSelectionTimeoutMs: getConfiguredShortcuts().multiCopyTimeoutMs,
|
||||
});
|
||||
writeSessionBindingsArtifact(CONFIG_DIR, artifact);
|
||||
appState.sessionBindings = bindings;
|
||||
writeSessionBindingsArtifact(
|
||||
CONFIG_DIR,
|
||||
buildPluginSessionBindingsArtifact({
|
||||
bindings,
|
||||
warnings,
|
||||
numericSelectionTimeoutMs: getConfiguredShortcuts().multiCopyTimeoutMs,
|
||||
}),
|
||||
);
|
||||
appState.sessionBindingsInitialized = true;
|
||||
if (appState.mpvClient?.connected) {
|
||||
sendMpvCommandRuntime(appState.mpvClient, [
|
||||
'script-message',
|
||||
@@ -4923,7 +4933,8 @@ const { handleCliCommand, handleInitialArgs } = composeCliStartupHandlers({
|
||||
printHelp: () => printHelp(DEFAULT_TEXTHOOKER_PORT),
|
||||
stopApp: () => requestAppQuit(),
|
||||
hasMainWindow: () => Boolean(overlayManager.getMainWindow()),
|
||||
dispatchSessionAction: (request: SessionActionDispatchRequest) => dispatchSessionAction(request),
|
||||
dispatchSessionAction: (request: SessionActionDispatchRequest) =>
|
||||
dispatchSessionAction(request),
|
||||
getMultiCopyTimeoutMs: () => getConfiguredShortcuts().multiCopyTimeoutMs,
|
||||
schedule: (fn: () => void, delayMs: number) => setTimeout(fn, delayMs),
|
||||
logInfo: (message: string) => logger.info(message),
|
||||
@@ -5191,14 +5202,18 @@ const { initializeOverlayRuntime: initializeOverlayRuntimeHandler } =
|
||||
if (process.platform !== 'win32' || !mainWindow || mainWindow.isDestroyed()) return;
|
||||
const overlayHwnd = getWindowsNativeWindowHandleNumber(mainWindow);
|
||||
const targetWindowHwnd = resolveWindowsOverlayBindTargetHandle(appState.mpvSocketPath);
|
||||
if (targetWindowHwnd !== null && bindWindowsOverlayAboveMpv(overlayHwnd, targetWindowHwnd)) {
|
||||
if (
|
||||
targetWindowHwnd !== null &&
|
||||
bindWindowsOverlayAboveMpv(overlayHwnd, targetWindowHwnd)
|
||||
) {
|
||||
return;
|
||||
}
|
||||
const tracker = appState.windowTracker;
|
||||
const mpvResult = tracker
|
||||
? (() => {
|
||||
try {
|
||||
const win32 = require('./window-trackers/win32') as typeof import('./window-trackers/win32');
|
||||
const win32 =
|
||||
require('./window-trackers/win32') as typeof import('./window-trackers/win32');
|
||||
const poll = win32.findMpvWindows();
|
||||
const focused = poll.matches.find((m) => m.isForeground);
|
||||
return focused ?? [...poll.matches].sort((a, b) => b.area - a.area)[0] ?? null;
|
||||
|
||||
@@ -132,7 +132,10 @@ export function createMainBootServices<
|
||||
TSubtitleWebSocket,
|
||||
TLogger,
|
||||
TRuntimeRegistry,
|
||||
TOverlayManager extends { getMainWindow: () => BrowserWindow | null; getModalWindow: () => BrowserWindow | null },
|
||||
TOverlayManager extends {
|
||||
getMainWindow: () => BrowserWindow | null;
|
||||
getModalWindow: () => BrowserWindow | null;
|
||||
},
|
||||
TOverlayModalInputState extends OverlayModalInputStateShape,
|
||||
TOverlayContentMeasurementStore,
|
||||
TOverlayModalRuntime,
|
||||
|
||||
@@ -33,6 +33,8 @@ function createMockWindow(): MockWindow & {
|
||||
hide: () => void;
|
||||
destroy: () => void;
|
||||
focus: () => void;
|
||||
emitDidFinishLoad: () => void;
|
||||
emitReadyToShow: () => void;
|
||||
once: (event: 'ready-to-show', cb: () => void) => void;
|
||||
webContents: {
|
||||
focused: boolean;
|
||||
@@ -89,6 +91,18 @@ function createMockWindow(): MockWindow & {
|
||||
focus: () => {
|
||||
state.focused = true;
|
||||
},
|
||||
emitDidFinishLoad: () => {
|
||||
const callbacks = state.loadCallbacks.splice(0);
|
||||
for (const callback of callbacks) {
|
||||
callback();
|
||||
}
|
||||
},
|
||||
emitReadyToShow: () => {
|
||||
const callbacks = state.readyToShowCallbacks.splice(0);
|
||||
for (const callback of callbacks) {
|
||||
callback();
|
||||
}
|
||||
},
|
||||
once: (_event: 'ready-to-show', cb: () => void) => {
|
||||
state.readyToShowCallbacks.push(cb);
|
||||
},
|
||||
@@ -166,13 +180,15 @@ function createMockWindow(): MockWindow & {
|
||||
get: () => state.contentReady,
|
||||
set: (value: boolean) => {
|
||||
state.contentReady = value;
|
||||
(window as typeof window & { __subminerOverlayContentReady?: boolean }).__subminerOverlayContentReady =
|
||||
value;
|
||||
(
|
||||
window as typeof window & { __subminerOverlayContentReady?: boolean }
|
||||
).__subminerOverlayContentReady = value;
|
||||
},
|
||||
});
|
||||
|
||||
(window as typeof window & { __subminerOverlayContentReady?: boolean }).__subminerOverlayContentReady =
|
||||
state.contentReady;
|
||||
(
|
||||
window as typeof window & { __subminerOverlayContentReady?: boolean }
|
||||
).__subminerOverlayContentReady = state.contentReady;
|
||||
|
||||
return window;
|
||||
}
|
||||
@@ -269,16 +285,13 @@ test('sendToActiveOverlayWindow waits for blank modal URL before sending open co
|
||||
|
||||
assert.equal(sent, true);
|
||||
assert.deepEqual(window.sent, []);
|
||||
|
||||
assert.equal(window.loadCallbacks.length, 1);
|
||||
assert.equal(window.readyToShowCallbacks.length, 1);
|
||||
window.loading = false;
|
||||
window.url = 'file:///overlay/index.html?layer=modal';
|
||||
window.loadCallbacks[0]!();
|
||||
window.emitDidFinishLoad();
|
||||
assert.deepEqual(window.sent, []);
|
||||
|
||||
window.contentReady = true;
|
||||
window.readyToShowCallbacks[0]!();
|
||||
window.emitReadyToShow();
|
||||
|
||||
runtime.notifyOverlayModalOpened('runtime-options');
|
||||
assert.deepEqual(window.sent, [['runtime-options:open']]);
|
||||
@@ -549,15 +562,27 @@ test('handleOverlayModalClosed destroys modal window for single kiku modal', ()
|
||||
|
||||
test('modal fallback reveal skips showing window when content is not ready', async () => {
|
||||
const window = createMockWindow();
|
||||
const runtime = createOverlayModalRuntimeService({
|
||||
getMainWindow: () => null,
|
||||
getModalWindow: () => window as never,
|
||||
createModalWindow: () => {
|
||||
throw new Error('modal window should not be created when already present');
|
||||
let scheduledReveal: (() => void) | null = null;
|
||||
const runtime = createOverlayModalRuntimeService(
|
||||
{
|
||||
getMainWindow: () => null,
|
||||
getModalWindow: () => window as never,
|
||||
createModalWindow: () => {
|
||||
throw new Error('modal window should not be created when already present');
|
||||
},
|
||||
getModalGeometry: () => ({ x: 0, y: 0, width: 400, height: 300 }),
|
||||
setModalWindowBounds: () => {},
|
||||
},
|
||||
getModalGeometry: () => ({ x: 0, y: 0, width: 400, height: 300 }),
|
||||
setModalWindowBounds: () => {},
|
||||
});
|
||||
{
|
||||
scheduleRevealFallback: (callback) => {
|
||||
scheduledReveal = callback;
|
||||
return { scheduled: true } as never;
|
||||
},
|
||||
clearRevealFallback: () => {
|
||||
scheduledReveal = null;
|
||||
},
|
||||
},
|
||||
);
|
||||
|
||||
window.loading = true;
|
||||
window.url = '';
|
||||
@@ -568,10 +593,11 @@ test('modal fallback reveal skips showing window when content is not ready', asy
|
||||
});
|
||||
|
||||
assert.equal(sent, true);
|
||||
|
||||
await new Promise<void>((resolve) => {
|
||||
setTimeout(resolve, 260);
|
||||
});
|
||||
if (scheduledReveal === null) {
|
||||
throw new Error('expected reveal callback');
|
||||
}
|
||||
const runScheduledReveal: () => void = scheduledReveal;
|
||||
runScheduledReveal();
|
||||
|
||||
assert.equal(window.getShowCount(), 0);
|
||||
|
||||
@@ -599,17 +625,49 @@ test('sendToActiveOverlayWindow waits for modal ready-to-show before delivering
|
||||
|
||||
assert.equal(sent, true);
|
||||
assert.deepEqual(window.sent, []);
|
||||
assert.equal(window.loadCallbacks.length, 1);
|
||||
assert.equal(window.readyToShowCallbacks.length, 1);
|
||||
|
||||
window.loadCallbacks[0]!();
|
||||
window.emitDidFinishLoad();
|
||||
assert.deepEqual(window.sent, []);
|
||||
|
||||
window.contentReady = true;
|
||||
window.readyToShowCallbacks[0]!();
|
||||
window.emitReadyToShow();
|
||||
assert.deepEqual(window.sent, [['runtime-options:open']]);
|
||||
});
|
||||
|
||||
test('sendToActiveOverlayWindow flushes every queued load and ready listener before sending', () => {
|
||||
const window = createMockWindow();
|
||||
window.contentReady = false;
|
||||
const runtime = createOverlayModalRuntimeService({
|
||||
getMainWindow: () => null,
|
||||
getModalWindow: () => window as never,
|
||||
createModalWindow: () => {
|
||||
throw new Error('modal window should not be created when already present');
|
||||
},
|
||||
getModalGeometry: () => ({ x: 0, y: 0, width: 400, height: 300 }),
|
||||
setModalWindowBounds: () => {},
|
||||
});
|
||||
|
||||
assert.equal(
|
||||
runtime.sendToActiveOverlayWindow('runtime-options:open', undefined, {
|
||||
restoreOnModalClose: 'runtime-options',
|
||||
}),
|
||||
true,
|
||||
);
|
||||
assert.equal(
|
||||
runtime.sendToActiveOverlayWindow('session-help:open', undefined, {
|
||||
restoreOnModalClose: 'session-help',
|
||||
}),
|
||||
true,
|
||||
);
|
||||
assert.deepEqual(window.sent, []);
|
||||
|
||||
window.emitDidFinishLoad();
|
||||
assert.deepEqual(window.sent, []);
|
||||
|
||||
window.contentReady = true;
|
||||
window.emitReadyToShow();
|
||||
assert.deepEqual(window.sent, [['runtime-options:open'], ['session-help:open']]);
|
||||
});
|
||||
|
||||
test('modal reopen creates a fresh window after close destroys the previous one', () => {
|
||||
const firstWindow = createMockWindow();
|
||||
const secondWindow = createMockWindow();
|
||||
@@ -617,8 +675,7 @@ test('modal reopen creates a fresh window after close destroys the previous one'
|
||||
|
||||
const runtime = createOverlayModalRuntimeService({
|
||||
getMainWindow: () => null,
|
||||
getModalWindow: () =>
|
||||
currentModal && !currentModal.isDestroyed() ? (currentModal as never) : null,
|
||||
getModalWindow: () => currentModal as never,
|
||||
createModalWindow: () => {
|
||||
currentModal = secondWindow;
|
||||
return secondWindow as never;
|
||||
@@ -653,8 +710,7 @@ test('modal reopen after close-destroy notifies state change on fresh window lif
|
||||
const runtime = createOverlayModalRuntimeService(
|
||||
{
|
||||
getMainWindow: () => null,
|
||||
getModalWindow: () =>
|
||||
currentModal && !currentModal.isDestroyed() ? (currentModal as never) : null,
|
||||
getModalWindow: () => currentModal as never,
|
||||
createModalWindow: () => {
|
||||
currentModal = secondWindow;
|
||||
return secondWindow as never;
|
||||
|
||||
@@ -50,8 +50,12 @@ export interface OverlayModalRuntime {
|
||||
getRestoreVisibleOverlayOnModalClose: () => Set<OverlayHostedModal>;
|
||||
}
|
||||
|
||||
type RevealFallbackHandle = NonNullable<Parameters<typeof globalThis.clearTimeout>[0]>;
|
||||
|
||||
export interface OverlayModalRuntimeOptions {
|
||||
onModalStateChange?: (isActive: boolean) => void;
|
||||
scheduleRevealFallback?: (callback: () => void, delayMs: number) => RevealFallbackHandle;
|
||||
clearRevealFallback?: (timeout: RevealFallbackHandle) => void;
|
||||
}
|
||||
|
||||
export function createOverlayModalRuntimeService(
|
||||
@@ -65,7 +69,11 @@ export function createOverlayModalRuntimeService(
|
||||
let mainWindowHiddenByModal = false;
|
||||
let modalWindowPrimedForImmediateShow = false;
|
||||
let pendingModalWindowReveal: BrowserWindow | null = null;
|
||||
let pendingModalWindowRevealTimeout: ReturnType<typeof setTimeout> | null = null;
|
||||
let pendingModalWindowRevealTimeout: RevealFallbackHandle | null = null;
|
||||
const scheduleRevealFallback = (callback: () => void, delayMs: number): RevealFallbackHandle =>
|
||||
(options.scheduleRevealFallback ?? globalThis.setTimeout)(callback, delayMs);
|
||||
const clearRevealFallback = (timeout: RevealFallbackHandle): void =>
|
||||
(options.clearRevealFallback ?? globalThis.clearTimeout)(timeout);
|
||||
|
||||
const notifyModalStateChange = (nextState: boolean): void => {
|
||||
if (modalActive === nextState) return;
|
||||
@@ -207,7 +215,7 @@ export function createOverlayModalRuntimeService(
|
||||
return;
|
||||
}
|
||||
|
||||
clearTimeout(pendingModalWindowRevealTimeout);
|
||||
clearRevealFallback(pendingModalWindowRevealTimeout);
|
||||
pendingModalWindowRevealTimeout = null;
|
||||
pendingModalWindowReveal = null;
|
||||
};
|
||||
@@ -266,7 +274,7 @@ export function createOverlayModalRuntimeService(
|
||||
return;
|
||||
}
|
||||
|
||||
pendingModalWindowRevealTimeout = setTimeout(() => {
|
||||
pendingModalWindowRevealTimeout = scheduleRevealFallback(() => {
|
||||
const targetWindow = pendingModalWindowReveal;
|
||||
clearPendingModalWindowReveal();
|
||||
if (!targetWindow || targetWindow.isDestroyed() || targetWindow.isVisible()) {
|
||||
|
||||
@@ -16,7 +16,8 @@ test('on will quit cleanup handler runs all cleanup steps', () => {
|
||||
unregisterAllGlobalShortcuts: () => calls.push('unregister-shortcuts'),
|
||||
stopSubtitleWebsocket: () => calls.push('stop-ws'),
|
||||
stopTexthookerService: () => calls.push('stop-texthooker'),
|
||||
clearWindowsVisibleOverlayForegroundPollLoop: () => calls.push('clear-windows-visible-overlay-poll'),
|
||||
clearWindowsVisibleOverlayForegroundPollLoop: () =>
|
||||
calls.push('clear-windows-visible-overlay-poll'),
|
||||
destroyMainOverlayWindow: () => calls.push('destroy-main-overlay-window'),
|
||||
destroyModalOverlayWindow: () => calls.push('destroy-modal-overlay-window'),
|
||||
destroyYomitanParserWindow: () => calls.push('destroy-yomitan-window'),
|
||||
|
||||
@@ -11,10 +11,14 @@ test('createConfigHotReloadAppliedHandler runs all hot-reload effects', () => {
|
||||
const config = deepCloneConfig(DEFAULT_CONFIG);
|
||||
const calls: string[] = [];
|
||||
const ankiPatches: Array<{ enabled: boolean }> = [];
|
||||
const sessionBindingWarnings: string[][] = [];
|
||||
|
||||
const applyHotReload = createConfigHotReloadAppliedHandler({
|
||||
setKeybindings: () => calls.push('set:keybindings'),
|
||||
setSessionBindings: () => calls.push('set:session-bindings'),
|
||||
setSessionBindings: (_sessionBindings, warnings) => {
|
||||
calls.push('set:session-bindings');
|
||||
sessionBindingWarnings.push(warnings.map((warning) => warning.message));
|
||||
},
|
||||
refreshGlobalAndOverlayShortcuts: () => calls.push('refresh:shortcuts'),
|
||||
setSecondarySubMode: (mode) => calls.push(`set:secondary:${mode}`),
|
||||
broadcastToOverlayWindows: (channel, payload) =>
|
||||
@@ -44,6 +48,12 @@ test('createConfigHotReloadAppliedHandler runs all hot-reload effects', () => {
|
||||
assert.ok(calls.some((entry) => entry.startsWith('broadcast:secondary-subtitle:mode:')));
|
||||
assert.ok(calls.includes('broadcast:config:hot-reload:object'));
|
||||
assert.deepEqual(ankiPatches, [{ enabled: config.ankiConnect.ai.enabled }]);
|
||||
assert.equal(sessionBindingWarnings.length, 1);
|
||||
assert.ok(
|
||||
sessionBindingWarnings[0]?.some((message) =>
|
||||
message.includes('Rename shortcuts.toggleVisibleOverlayGlobal'),
|
||||
),
|
||||
);
|
||||
});
|
||||
|
||||
test('createConfigHotReloadAppliedHandler skips optional effects when no hot fields', () => {
|
||||
@@ -70,6 +80,34 @@ test('createConfigHotReloadAppliedHandler skips optional effects when no hot fie
|
||||
assert.deepEqual(calls, ['set:keybindings', 'set:session-bindings']);
|
||||
});
|
||||
|
||||
test('createConfigHotReloadAppliedHandler forwards compiled session-binding warnings', () => {
|
||||
const config = deepCloneConfig(DEFAULT_CONFIG);
|
||||
config.shortcuts.openSessionHelp = 'Ctrl+?';
|
||||
const warnings: string[][] = [];
|
||||
|
||||
const applyHotReload = createConfigHotReloadAppliedHandler({
|
||||
setKeybindings: () => {},
|
||||
setSessionBindings: (_sessionBindings, sessionBindingWarnings) => {
|
||||
warnings.push(sessionBindingWarnings.map((warning) => warning.message));
|
||||
},
|
||||
refreshGlobalAndOverlayShortcuts: () => {},
|
||||
setSecondarySubMode: () => {},
|
||||
broadcastToOverlayWindows: () => {},
|
||||
applyAnkiRuntimeConfigPatch: () => {},
|
||||
});
|
||||
|
||||
applyHotReload(
|
||||
{
|
||||
hotReloadFields: ['shortcuts'],
|
||||
restartRequiredFields: [],
|
||||
},
|
||||
config,
|
||||
);
|
||||
|
||||
assert.equal(warnings.length, 1);
|
||||
assert.ok(warnings[0]?.some((message) => message.includes('Unsupported accelerator key token')));
|
||||
});
|
||||
|
||||
test('createConfigHotReloadMessageHandler mirrors message to OSD and desktop notification', () => {
|
||||
const calls: string[] = [];
|
||||
const handleMessage = createConfigHotReloadMessageHandler({
|
||||
|
||||
@@ -7,7 +7,10 @@ import type { ConfigHotReloadPayload, ResolvedConfig, SecondarySubMode } from '.
|
||||
|
||||
type ConfigHotReloadAppliedDeps = {
|
||||
setKeybindings: (keybindings: ConfigHotReloadPayload['keybindings']) => void;
|
||||
setSessionBindings: (sessionBindings: ConfigHotReloadPayload['sessionBindings']) => void;
|
||||
setSessionBindings: (
|
||||
sessionBindings: ConfigHotReloadPayload['sessionBindings'],
|
||||
sessionBindingWarnings: ConfigHotReloadPayload['sessionBindingWarnings'],
|
||||
) => void;
|
||||
refreshGlobalAndOverlayShortcuts: () => void;
|
||||
setSecondarySubMode: (mode: SecondarySubMode) => void;
|
||||
broadcastToOverlayWindows: (channel: string, payload: unknown) => void;
|
||||
@@ -37,20 +40,18 @@ export function resolveSubtitleStyleForRenderer(config: ResolvedConfig) {
|
||||
|
||||
export function buildConfigHotReloadPayload(config: ResolvedConfig): ConfigHotReloadPayload {
|
||||
const keybindings = resolveKeybindings(config, DEFAULT_KEYBINDINGS);
|
||||
const { bindings: sessionBindings } = compileSessionBindings({
|
||||
const { bindings: sessionBindings, warnings: sessionBindingWarnings } = compileSessionBindings({
|
||||
keybindings,
|
||||
shortcuts: resolveConfiguredShortcuts(config, DEFAULT_CONFIG),
|
||||
statsToggleKey: config.stats.toggleKey,
|
||||
platform:
|
||||
process.platform === 'darwin'
|
||||
? 'darwin'
|
||||
: process.platform === 'win32'
|
||||
? 'win32'
|
||||
: 'linux',
|
||||
process.platform === 'darwin' ? 'darwin' : process.platform === 'win32' ? 'win32' : 'linux',
|
||||
rawConfig: config,
|
||||
});
|
||||
return {
|
||||
keybindings,
|
||||
sessionBindings,
|
||||
sessionBindingWarnings,
|
||||
subtitleStyle: resolveSubtitleStyleForRenderer(config),
|
||||
subtitleSidebar: config.subtitleSidebar,
|
||||
secondarySubMode: config.secondarySub.defaultMode,
|
||||
@@ -61,7 +62,7 @@ export function createConfigHotReloadAppliedHandler(deps: ConfigHotReloadApplied
|
||||
return (diff: ConfigHotReloadDiff, config: ResolvedConfig): void => {
|
||||
const payload = buildConfigHotReloadPayload(config);
|
||||
deps.setKeybindings(payload.keybindings);
|
||||
deps.setSessionBindings(payload.sessionBindings);
|
||||
deps.setSessionBindings(payload.sessionBindings, payload.sessionBindingWarnings);
|
||||
|
||||
if (diff.hotReloadFields.includes('shortcuts')) {
|
||||
deps.refreshGlobalAndOverlayShortcuts();
|
||||
|
||||
@@ -86,9 +86,13 @@ test('config hot reload message main deps builder maps notifications', () => {
|
||||
|
||||
test('config hot reload applied main deps builder maps callbacks', () => {
|
||||
const calls: string[] = [];
|
||||
const warningCounts: number[] = [];
|
||||
const buildDeps = createBuildConfigHotReloadAppliedMainDepsHandler({
|
||||
setKeybindings: () => calls.push('keybindings'),
|
||||
setSessionBindings: () => calls.push('session-bindings'),
|
||||
setSessionBindings: (_sessionBindings, warnings) => {
|
||||
calls.push('session-bindings');
|
||||
warningCounts.push(warnings.length);
|
||||
},
|
||||
refreshGlobalAndOverlayShortcuts: () => calls.push('refresh-shortcuts'),
|
||||
setSecondarySubMode: () => calls.push('set-secondary'),
|
||||
broadcastToOverlayWindows: (channel) => calls.push(`broadcast:${channel}`),
|
||||
@@ -97,7 +101,7 @@ test('config hot reload applied main deps builder maps callbacks', () => {
|
||||
|
||||
const deps = buildDeps();
|
||||
deps.setKeybindings([]);
|
||||
deps.setSessionBindings([]);
|
||||
deps.setSessionBindings([], []);
|
||||
deps.refreshGlobalAndOverlayShortcuts();
|
||||
deps.setSecondarySubMode('hover');
|
||||
deps.broadcastToOverlayWindows('config:hot-reload', {});
|
||||
@@ -110,6 +114,7 @@ test('config hot reload applied main deps builder maps callbacks', () => {
|
||||
'broadcast:config:hot-reload',
|
||||
'apply-anki',
|
||||
]);
|
||||
assert.deepEqual(warningCounts, [0]);
|
||||
});
|
||||
|
||||
test('config hot reload runtime main deps builder maps runtime callbacks', () => {
|
||||
|
||||
@@ -62,7 +62,10 @@ export function createBuildConfigHotReloadMessageMainDepsHandler(
|
||||
|
||||
export function createBuildConfigHotReloadAppliedMainDepsHandler(deps: {
|
||||
setKeybindings: (keybindings: ConfigHotReloadPayload['keybindings']) => void;
|
||||
setSessionBindings: (sessionBindings: ConfigHotReloadPayload['sessionBindings']) => void;
|
||||
setSessionBindings: (
|
||||
sessionBindings: ConfigHotReloadPayload['sessionBindings'],
|
||||
sessionBindingWarnings: ConfigHotReloadPayload['sessionBindingWarnings'],
|
||||
) => void;
|
||||
refreshGlobalAndOverlayShortcuts: () => void;
|
||||
setSecondarySubMode: (mode: SecondarySubMode) => void;
|
||||
broadcastToOverlayWindows: (channel: string, payload: unknown) => void;
|
||||
@@ -73,8 +76,10 @@ export function createBuildConfigHotReloadAppliedMainDepsHandler(deps: {
|
||||
return () => ({
|
||||
setKeybindings: (keybindings: ConfigHotReloadPayload['keybindings']) =>
|
||||
deps.setKeybindings(keybindings),
|
||||
setSessionBindings: (sessionBindings: ConfigHotReloadPayload['sessionBindings']) =>
|
||||
deps.setSessionBindings(sessionBindings),
|
||||
setSessionBindings: (
|
||||
sessionBindings: ConfigHotReloadPayload['sessionBindings'],
|
||||
sessionBindingWarnings: ConfigHotReloadPayload['sessionBindingWarnings'],
|
||||
) => deps.setSessionBindings(sessionBindings, sessionBindingWarnings),
|
||||
refreshGlobalAndOverlayShortcuts: () => deps.refreshGlobalAndOverlayShortcuts(),
|
||||
setSecondarySubMode: (mode: SecondarySubMode) => deps.setSecondarySubMode(mode),
|
||||
broadcastToOverlayWindows: (channel: string, payload: unknown) =>
|
||||
|
||||
@@ -94,12 +94,33 @@ test('shouldAutoOpenFirstRunSetup only for startup/setup intents', () => {
|
||||
});
|
||||
|
||||
test('shouldAutoOpenFirstRunSetup treats numeric startup counts as explicit commands', () => {
|
||||
assert.equal(shouldAutoOpenFirstRunSetup(makeArgs({ start: true, copySubtitleCount: 2 })), false);
|
||||
assert.equal(
|
||||
shouldAutoOpenFirstRunSetup(makeArgs({ start: true, copySubtitleCount: 2 })),
|
||||
shouldAutoOpenFirstRunSetup(makeArgs({ background: true, mineSentenceCount: 1 })),
|
||||
false,
|
||||
);
|
||||
});
|
||||
|
||||
test('shouldAutoOpenFirstRunSetup treats session and stats startup commands as explicit commands', () => {
|
||||
assert.equal(
|
||||
shouldAutoOpenFirstRunSetup(makeArgs({ start: true, toggleSubtitleSidebar: true })),
|
||||
false,
|
||||
);
|
||||
assert.equal(
|
||||
shouldAutoOpenFirstRunSetup(makeArgs({ background: true, mineSentenceCount: 1 })),
|
||||
shouldAutoOpenFirstRunSetup(makeArgs({ background: true, openSessionHelp: true })),
|
||||
false,
|
||||
);
|
||||
assert.equal(
|
||||
shouldAutoOpenFirstRunSetup(makeArgs({ start: true, openControllerSelect: true })),
|
||||
false,
|
||||
);
|
||||
assert.equal(
|
||||
shouldAutoOpenFirstRunSetup(makeArgs({ background: true, openControllerDebug: true })),
|
||||
false,
|
||||
);
|
||||
assert.equal(shouldAutoOpenFirstRunSetup(makeArgs({ start: true, stats: true })), false);
|
||||
assert.equal(
|
||||
shouldAutoOpenFirstRunSetup(makeArgs({ background: true, jellyfinSubtitleUrlsOnly: true })),
|
||||
false,
|
||||
);
|
||||
});
|
||||
|
||||
@@ -79,7 +79,11 @@ function hasAnyStartupCommandBeyondSetup(args: CliArgs): boolean {
|
||||
args.triggerSubsync ||
|
||||
args.markAudioCard ||
|
||||
args.toggleStatsOverlay ||
|
||||
args.toggleSubtitleSidebar ||
|
||||
args.openRuntimeOptions ||
|
||||
args.openSessionHelp ||
|
||||
args.openControllerSelect ||
|
||||
args.openControllerDebug ||
|
||||
args.openJimaku ||
|
||||
args.openYoutubePicker ||
|
||||
args.openPlaylistBrowser ||
|
||||
@@ -93,12 +97,14 @@ function hasAnyStartupCommandBeyondSetup(args: CliArgs): boolean {
|
||||
args.anilistSetup ||
|
||||
args.anilistRetryQueue ||
|
||||
args.dictionary ||
|
||||
args.stats ||
|
||||
args.jellyfin ||
|
||||
args.jellyfinLogin ||
|
||||
args.jellyfinLogout ||
|
||||
args.jellyfinLibraries ||
|
||||
args.jellyfinItems ||
|
||||
args.jellyfinSubtitles ||
|
||||
args.jellyfinSubtitleUrlsOnly ||
|
||||
args.jellyfinPlay ||
|
||||
args.jellyfinRemoteAnnounce ||
|
||||
args.jellyfinPreviewAuth ||
|
||||
|
||||
@@ -194,7 +194,9 @@ test('createImmersionTrackerStartupHandler keeps tracker startup alive when mpv
|
||||
),
|
||||
);
|
||||
assert.equal(
|
||||
calls.some((entry) => entry.startsWith('warn:Immersion tracker startup failed; disabling tracking.')),
|
||||
calls.some((entry) =>
|
||||
entry.startsWith('warn:Immersion tracker startup failed; disabling tracking.'),
|
||||
),
|
||||
false,
|
||||
);
|
||||
});
|
||||
|
||||
@@ -105,7 +105,10 @@ export function createImmersionTrackerStartupHandler(
|
||||
try {
|
||||
mpvClient.connect();
|
||||
} catch (error) {
|
||||
deps.logWarn('MPV auto-connect failed during immersion tracker startup; continuing.', error);
|
||||
deps.logWarn(
|
||||
'MPV auto-connect failed during immersion tracker startup; continuing.',
|
||||
error,
|
||||
);
|
||||
}
|
||||
}
|
||||
deps.seedTrackerFromCurrentMedia();
|
||||
|
||||
@@ -113,3 +113,12 @@ test('applyStartupState preserves cleared startup-only runtime flags', () => {
|
||||
|
||||
assert.equal(appState.initialArgs?.settings, true);
|
||||
});
|
||||
|
||||
test('createAppState starts with session bindings marked uninitialized', () => {
|
||||
const appState = createAppState({
|
||||
mpvSocketPath: '/tmp/mpv.sock',
|
||||
texthookerPort: 4000,
|
||||
});
|
||||
|
||||
assert.equal(appState.sessionBindingsInitialized, false);
|
||||
});
|
||||
|
||||
@@ -172,6 +172,7 @@ export interface AppState {
|
||||
mecabTokenizer: MecabTokenizer | null;
|
||||
keybindings: Keybinding[];
|
||||
sessionBindings: CompiledSessionBinding[];
|
||||
sessionBindingsInitialized: boolean;
|
||||
subtitleTimingTracker: SubtitleTimingTracker | null;
|
||||
immersionTracker: ImmersionTrackerService | null;
|
||||
ankiIntegration: AnkiIntegration | null;
|
||||
@@ -255,6 +256,7 @@ export function createAppState(values: AppStateInitialValues): AppState {
|
||||
mecabTokenizer: null,
|
||||
keybindings: [],
|
||||
sessionBindings: [],
|
||||
sessionBindingsInitialized: false,
|
||||
subtitleTimingTracker: null,
|
||||
immersionTracker: null,
|
||||
ankiIntegration: null,
|
||||
|
||||
@@ -124,7 +124,9 @@ function createQueuedIpcListenerWithPayload<T>(
|
||||
|
||||
const onOpenRuntimeOptionsEvent = createQueuedIpcListener(IPC_CHANNELS.event.runtimeOptionsOpen);
|
||||
const onOpenSessionHelpEvent = createQueuedIpcListener(IPC_CHANNELS.event.sessionHelpOpen);
|
||||
const onOpenControllerSelectEvent = createQueuedIpcListener(IPC_CHANNELS.event.controllerSelectOpen);
|
||||
const onOpenControllerSelectEvent = createQueuedIpcListener(
|
||||
IPC_CHANNELS.event.controllerSelectOpen,
|
||||
);
|
||||
const onOpenControllerDebugEvent = createQueuedIpcListener(IPC_CHANNELS.event.controllerDebugOpen);
|
||||
const onOpenJimakuEvent = createQueuedIpcListener(IPC_CHANNELS.event.jimakuOpen);
|
||||
const onOpenYoutubeTrackPickerEvent = createQueuedIpcListenerWithPayload<YoutubePickerOpenPayload>(
|
||||
|
||||
@@ -4,7 +4,7 @@ import { readFileSync } from 'node:fs';
|
||||
import { resolve } from 'node:path';
|
||||
|
||||
const prereleaseWorkflowPath = resolve(__dirname, '../.github/workflows/prerelease.yml');
|
||||
const prereleaseWorkflow = readFileSync(prereleaseWorkflowPath, 'utf8');
|
||||
const prereleaseWorkflow = readFileSync(prereleaseWorkflowPath, 'utf8').replace(/\r\n/g, '\n');
|
||||
const packageJsonPath = resolve(__dirname, '../package.json');
|
||||
const packageJson = JSON.parse(readFileSync(packageJsonPath, 'utf8')) as {
|
||||
scripts: Record<string, string>;
|
||||
@@ -12,8 +12,12 @@ const packageJson = JSON.parse(readFileSync(packageJsonPath, 'utf8')) as {
|
||||
|
||||
test('prerelease workflow triggers on beta and rc tags only', () => {
|
||||
assert.match(prereleaseWorkflow, /name: Prerelease/);
|
||||
assert.match(prereleaseWorkflow, /tags:\s*\n\s*-\s*'v\*-beta\.\*'/);
|
||||
assert.match(prereleaseWorkflow, /tags:\s*\n(?:.*\n)*\s*-\s*'v\*-rc\.\*'/);
|
||||
const tagsBlock = prereleaseWorkflow.match(/tags:\s*\n((?:\s*-\s*'[^']+'\s*\n?)+)/);
|
||||
assert.ok(tagsBlock, 'Workflow tags block not found');
|
||||
const tagsText = tagsBlock[1];
|
||||
assert.ok(tagsText, 'Workflow tags entries not found');
|
||||
const tagPatterns = [...tagsText.matchAll(/-\s*'([^']+)'/g)].map(([, pattern]) => pattern);
|
||||
assert.deepEqual(tagPatterns, ['v*-beta.*', 'v*-rc.*']);
|
||||
});
|
||||
|
||||
test('package scripts expose prerelease notes generation separately from stable changelog build', () => {
|
||||
@@ -28,12 +32,30 @@ test('prerelease workflow generates prerelease notes from pending fragments', ()
|
||||
assert.doesNotMatch(prereleaseWorkflow, /bun run changelog:build --version/);
|
||||
});
|
||||
|
||||
test('prerelease workflow includes the environment suite in the gate sequence', () => {
|
||||
assert.match(
|
||||
prereleaseWorkflow,
|
||||
/Test suite \(source\)\n\s*run: bun run test:fast\n\s*\n\s*- name: Environment suite(?: \(source\))?\n\s*run: bun run test:env\n\s*\n\s*- name: Coverage suite \(maintained source lane\)/,
|
||||
);
|
||||
});
|
||||
|
||||
test('prerelease workflow publishes GitHub prereleases and keeps them off latest', () => {
|
||||
assert.match(prereleaseWorkflow, /gh release edit[\s\S]*--prerelease/);
|
||||
assert.match(prereleaseWorkflow, /gh release create[\s\S]*--prerelease/);
|
||||
assert.match(prereleaseWorkflow, /gh release create[\s\S]*--latest=false/);
|
||||
});
|
||||
|
||||
test('prerelease workflow scopes dependency caches by runner architecture', () => {
|
||||
const archScopedCacheKeyMatches = prereleaseWorkflow.match(
|
||||
/key:\s*\${{\s*runner\.os\s*}}-\${{\s*runner\.arch\s*}}-bun-/g,
|
||||
);
|
||||
const archScopedRestoreKeyMatches = prereleaseWorkflow.match(
|
||||
/\${{\s*runner\.os\s*}}-\${{\s*runner\.arch\s*}}-bun-/g,
|
||||
);
|
||||
assert.equal(archScopedCacheKeyMatches?.length, 5);
|
||||
assert.ok((archScopedRestoreKeyMatches?.length ?? 0) >= 10);
|
||||
});
|
||||
|
||||
test('prerelease workflow builds and uploads all release platforms', () => {
|
||||
assert.match(prereleaseWorkflow, /build-linux:/);
|
||||
assert.match(prereleaseWorkflow, /build-macos:/);
|
||||
@@ -54,6 +76,31 @@ test('prerelease workflow publishes the same release assets as the stable workfl
|
||||
);
|
||||
});
|
||||
|
||||
test('prerelease workflow writes checksum entries using release asset basenames', () => {
|
||||
assert.match(prereleaseWorkflow, /: > release\/SHA256SUMS\.txt/);
|
||||
assert.match(prereleaseWorkflow, /for file in "\$\{files\[@\]\}"; do/);
|
||||
assert.match(prereleaseWorkflow, /\$\{file##\*\/\}/);
|
||||
assert.doesNotMatch(
|
||||
prereleaseWorkflow,
|
||||
/sha256sum "\$\{files\[@\]\}" > release\/SHA256SUMS\.txt/,
|
||||
);
|
||||
});
|
||||
|
||||
test('prerelease workflow validates artifacts before publishing the release and only undrafts after upload', () => {
|
||||
const artifactsIndex = prereleaseWorkflow.indexOf('artifacts=(');
|
||||
const createIndex = prereleaseWorkflow.indexOf('gh release create');
|
||||
const uploadIndex = prereleaseWorkflow.indexOf('gh release upload');
|
||||
const undraftIndex = prereleaseWorkflow.indexOf('--draft=false');
|
||||
|
||||
assert.notEqual(artifactsIndex, -1);
|
||||
assert.notEqual(createIndex, -1);
|
||||
assert.notEqual(uploadIndex, -1);
|
||||
assert.notEqual(undraftIndex, -1);
|
||||
assert.ok(artifactsIndex < createIndex);
|
||||
assert.ok(uploadIndex < undraftIndex);
|
||||
assert.match(prereleaseWorkflow, /gh release create[\s\S]*--draft[\s\S]*--prerelease/);
|
||||
});
|
||||
|
||||
test('prerelease workflow does not publish to AUR', () => {
|
||||
assert.doesNotMatch(prereleaseWorkflow, /aur-publish:/);
|
||||
assert.doesNotMatch(prereleaseWorkflow, /AUR_SSH_PRIVATE_KEY/);
|
||||
|
||||
@@ -77,6 +77,13 @@ test('release workflow includes the Windows installer in checksums and uploaded
|
||||
);
|
||||
});
|
||||
|
||||
test('release workflow writes checksum entries using release asset basenames', () => {
|
||||
assert.match(releaseWorkflow, /: > release\/SHA256SUMS\.txt/);
|
||||
assert.match(releaseWorkflow, /for file in "\$\{files\[@\]\}"; do/);
|
||||
assert.match(releaseWorkflow, /\$\{file##\*\/\}/);
|
||||
assert.doesNotMatch(releaseWorkflow, /sha256sum "\$\{files\[@\]\}" > release\/SHA256SUMS\.txt/);
|
||||
});
|
||||
|
||||
test('release package scripts disable implicit electron-builder publishing', () => {
|
||||
assert.match(packageJson.scripts['build:appimage'] ?? '', /--publish never/);
|
||||
assert.match(packageJson.scripts['build:mac'] ?? '', /--publish never/);
|
||||
|
||||
@@ -364,7 +364,10 @@ test('isYomitanPopupVisible requires visible iframe geometry', () => {
|
||||
const root = {
|
||||
querySelectorAll: (value: string) => {
|
||||
selectors.push(value);
|
||||
if (value === YOMITAN_POPUP_VISIBLE_HOST_SELECTOR || value === YOMITAN_POPUP_HOST_SELECTOR) {
|
||||
if (
|
||||
value === YOMITAN_POPUP_VISIBLE_HOST_SELECTOR ||
|
||||
value === YOMITAN_POPUP_HOST_SELECTOR
|
||||
) {
|
||||
return [];
|
||||
}
|
||||
return [hiddenFrame, visibleFrame];
|
||||
|
||||
@@ -78,6 +78,7 @@ function installKeyboardTestGlobals() {
|
||||
let markActiveVideoWatchedResult = true;
|
||||
let markActiveVideoWatchedCalls = 0;
|
||||
let statsToggleOverlayCalls = 0;
|
||||
const openedModalNotifications: string[] = [];
|
||||
let selectionClearCount = 0;
|
||||
let selectionAddCount = 0;
|
||||
|
||||
@@ -183,6 +184,9 @@ function installKeyboardTestGlobals() {
|
||||
focusMainWindowCalls += 1;
|
||||
return Promise.resolve();
|
||||
},
|
||||
notifyOverlayModalOpened: (modal: string) => {
|
||||
openedModalNotifications.push(modal);
|
||||
},
|
||||
},
|
||||
},
|
||||
});
|
||||
@@ -312,6 +316,7 @@ function installKeyboardTestGlobals() {
|
||||
},
|
||||
markActiveVideoWatchedCalls: () => markActiveVideoWatchedCalls,
|
||||
statsToggleOverlayCalls: () => statsToggleOverlayCalls,
|
||||
openedModalNotifications,
|
||||
getPlaybackPaused: async () => playbackPausedResponse,
|
||||
setPlaybackPausedResponse: (value: boolean | null) => {
|
||||
playbackPausedResponse = value;
|
||||
@@ -326,6 +331,8 @@ function createKeyboardHandlerHarness() {
|
||||
const testGlobals = installKeyboardTestGlobals();
|
||||
const subtitleRootClassList = createClassList();
|
||||
let controllerSelectKeydownCount = 0;
|
||||
let openControllerSelectCount = 0;
|
||||
let openControllerDebugCount = 0;
|
||||
let playlistBrowserKeydownCount = 0;
|
||||
|
||||
const createWordNode = (left: number) => ({
|
||||
@@ -373,6 +380,12 @@ function createKeyboardHandlerHarness() {
|
||||
},
|
||||
handleSessionHelpKeydown: () => false,
|
||||
openSessionHelpModal: () => {},
|
||||
openControllerSelectModal: () => {
|
||||
openControllerSelectCount += 1;
|
||||
},
|
||||
openControllerDebugModal: () => {
|
||||
openControllerDebugCount += 1;
|
||||
},
|
||||
appendClipboardVideoToQueue: () => {},
|
||||
getPlaybackPaused: () => testGlobals.getPlaybackPaused(),
|
||||
});
|
||||
@@ -382,6 +395,8 @@ function createKeyboardHandlerHarness() {
|
||||
handlers,
|
||||
testGlobals,
|
||||
controllerSelectKeydownCount: () => controllerSelectKeydownCount,
|
||||
openControllerSelectCount: () => openControllerSelectCount,
|
||||
openControllerDebugCount: () => openControllerDebugCount,
|
||||
playlistBrowserKeydownCount: () => playlistBrowserKeydownCount,
|
||||
setWordCount: (count: number) => {
|
||||
wordNodes = Array.from({ length: count }, (_, index) => createWordNode(10 + index * 70));
|
||||
@@ -389,6 +404,88 @@ function createKeyboardHandlerHarness() {
|
||||
};
|
||||
}
|
||||
|
||||
test('session help chord resolver follows remapped session bindings', async () => {
|
||||
const { handlers, testGlobals } = createKeyboardHandlerHarness();
|
||||
|
||||
try {
|
||||
await handlers.setupMpvInputForwarding();
|
||||
|
||||
assert.deepEqual(handlers.getSessionHelpOpeningInfo(), {
|
||||
bindingKey: 'KeyH',
|
||||
fallbackUsed: false,
|
||||
fallbackUnavailable: false,
|
||||
});
|
||||
|
||||
handlers.updateSessionBindings([
|
||||
{
|
||||
sourcePath: 'keybindings[0].key',
|
||||
originalKey: 'KeyH',
|
||||
key: { code: 'KeyH', modifiers: [] },
|
||||
actionType: 'session-action',
|
||||
actionId: 'openJimaku',
|
||||
},
|
||||
{
|
||||
sourcePath: 'keybindings[1].key',
|
||||
originalKey: 'KeyJ',
|
||||
key: { code: 'KeyJ', modifiers: [] },
|
||||
actionType: 'mpv-command',
|
||||
command: ['cycle', 'pause'],
|
||||
},
|
||||
] as never);
|
||||
|
||||
assert.deepEqual(handlers.getSessionHelpOpeningInfo(), {
|
||||
bindingKey: 'KeyK',
|
||||
fallbackUsed: true,
|
||||
fallbackUnavailable: false,
|
||||
});
|
||||
|
||||
handlers.updateSessionBindings([
|
||||
{
|
||||
sourcePath: 'keybindings[0].key',
|
||||
originalKey: 'KeyH',
|
||||
key: { code: 'KeyH', modifiers: [] },
|
||||
actionType: 'session-action',
|
||||
actionId: 'openSessionHelp',
|
||||
},
|
||||
{
|
||||
sourcePath: 'keybindings[1].key',
|
||||
originalKey: 'KeyK',
|
||||
key: { code: 'KeyK', modifiers: [] },
|
||||
actionType: 'session-action',
|
||||
actionId: 'openControllerSelect',
|
||||
},
|
||||
] as never);
|
||||
|
||||
assert.deepEqual(handlers.getSessionHelpOpeningInfo(), {
|
||||
bindingKey: 'KeyK',
|
||||
fallbackUsed: true,
|
||||
fallbackUnavailable: true,
|
||||
});
|
||||
} finally {
|
||||
testGlobals.restore();
|
||||
}
|
||||
});
|
||||
|
||||
test('numeric selection ignores non-digit keys instead of falling through to other shortcuts', async () => {
|
||||
const { handlers, testGlobals, ctx } = createKeyboardHandlerHarness();
|
||||
|
||||
try {
|
||||
await handlers.setupMpvInputForwarding();
|
||||
handlers.beginSessionNumericSelection('copySubtitleMultiple');
|
||||
|
||||
testGlobals.dispatchKeydown({ key: 'y', code: 'KeyY' });
|
||||
|
||||
assert.equal(ctx.state.chordPending, false);
|
||||
assert.deepEqual(testGlobals.sessionActions, []);
|
||||
assert.equal(
|
||||
testGlobals.commandEvents.some((event) => event.type === 'forwardKeyDown'),
|
||||
false,
|
||||
);
|
||||
} finally {
|
||||
testGlobals.restore();
|
||||
}
|
||||
});
|
||||
|
||||
test('keyboard mode: left and right move token selection while popup remains open', async () => {
|
||||
const { ctx, handlers, testGlobals } = createKeyboardHandlerHarness();
|
||||
|
||||
@@ -631,6 +728,44 @@ test('visible-layer y-t dispatches mpv plugin toggle while overlay owns focus',
|
||||
}
|
||||
});
|
||||
|
||||
test('refreshConfiguredShortcuts updates hot-reloaded stats and watched keys', async () => {
|
||||
const { ctx, handlers, testGlobals } = createKeyboardHandlerHarness();
|
||||
|
||||
try {
|
||||
await handlers.setupMpvInputForwarding();
|
||||
|
||||
testGlobals.setConfiguredShortcuts({
|
||||
copySubtitle: '',
|
||||
copySubtitleMultiple: '',
|
||||
updateLastCardFromClipboard: '',
|
||||
triggerFieldGrouping: '',
|
||||
triggerSubsync: 'Ctrl+Alt+S',
|
||||
mineSentence: '',
|
||||
mineSentenceMultiple: '',
|
||||
multiCopyTimeoutMs: 3333,
|
||||
toggleSecondarySub: '',
|
||||
markAudioCard: '',
|
||||
openRuntimeOptions: 'CommandOrControl+Shift+O',
|
||||
openJimaku: 'Ctrl+Shift+J',
|
||||
openSessionHelp: 'CommandOrControl+Shift+H',
|
||||
openControllerSelect: 'Alt+C',
|
||||
openControllerDebug: 'Alt+Shift+C',
|
||||
toggleSubtitleSidebar: '',
|
||||
toggleVisibleOverlayGlobal: '',
|
||||
});
|
||||
testGlobals.setStatsToggleKey('');
|
||||
testGlobals.setMarkWatchedKey('');
|
||||
|
||||
await handlers.refreshConfiguredShortcuts();
|
||||
|
||||
assert.equal(ctx.state.sessionActionTimeoutMs, 3333);
|
||||
assert.equal(ctx.state.statsToggleKey, '');
|
||||
assert.equal(ctx.state.markWatchedKey, '');
|
||||
} finally {
|
||||
testGlobals.restore();
|
||||
}
|
||||
});
|
||||
|
||||
test('keyboard mode: controller helpers dispatch popup audio play/cycle and scroll bridge commands', async () => {
|
||||
const { ctx, handlers, testGlobals } = createKeyboardHandlerHarness();
|
||||
|
||||
@@ -653,8 +788,37 @@ test('keyboard mode: controller helpers dispatch popup audio play/cycle and scro
|
||||
}
|
||||
});
|
||||
|
||||
test('keyboard mode: configured controller debug binding dispatches session action', async () => {
|
||||
const { testGlobals, handlers } = createKeyboardHandlerHarness();
|
||||
test('keyboard mode: configured controller select binding opens locally without dispatching a session action', async () => {
|
||||
const { testGlobals, handlers, openControllerSelectCount } = createKeyboardHandlerHarness();
|
||||
|
||||
try {
|
||||
await handlers.setupMpvInputForwarding();
|
||||
handlers.updateSessionBindings([
|
||||
{
|
||||
sourcePath: 'shortcuts.openControllerSelect',
|
||||
originalKey: 'Alt+D',
|
||||
key: { code: 'KeyD', modifiers: ['alt'] },
|
||||
actionType: 'session-action',
|
||||
actionId: 'openControllerSelect',
|
||||
},
|
||||
] as never);
|
||||
|
||||
testGlobals.dispatchKeydown({
|
||||
key: 'd',
|
||||
code: 'KeyD',
|
||||
altKey: true,
|
||||
});
|
||||
|
||||
assert.equal(openControllerSelectCount(), 1);
|
||||
assert.deepEqual(testGlobals.sessionActions, []);
|
||||
assert.deepEqual(testGlobals.openedModalNotifications, ['controller-select']);
|
||||
} finally {
|
||||
testGlobals.restore();
|
||||
}
|
||||
});
|
||||
|
||||
test('keyboard mode: configured controller debug binding opens locally without dispatching a session action', async () => {
|
||||
const { testGlobals, handlers, openControllerDebugCount } = createKeyboardHandlerHarness();
|
||||
|
||||
try {
|
||||
await handlers.setupMpvInputForwarding();
|
||||
@@ -675,14 +839,16 @@ test('keyboard mode: configured controller debug binding dispatches session acti
|
||||
shiftKey: true,
|
||||
});
|
||||
|
||||
assert.deepEqual(testGlobals.sessionActions, [{ actionId: 'openControllerDebug', payload: undefined }]);
|
||||
assert.equal(openControllerDebugCount(), 1);
|
||||
assert.deepEqual(testGlobals.sessionActions, []);
|
||||
assert.deepEqual(testGlobals.openedModalNotifications, ['controller-debug']);
|
||||
} finally {
|
||||
testGlobals.restore();
|
||||
}
|
||||
});
|
||||
|
||||
test('keyboard mode: configured controller debug binding is not swallowed while popup is visible', async () => {
|
||||
const { ctx, testGlobals, handlers } = createKeyboardHandlerHarness();
|
||||
const { ctx, testGlobals, handlers, openControllerDebugCount } = createKeyboardHandlerHarness();
|
||||
|
||||
try {
|
||||
await handlers.setupMpvInputForwarding();
|
||||
@@ -705,7 +871,9 @@ test('keyboard mode: configured controller debug binding is not swallowed while
|
||||
shiftKey: true,
|
||||
});
|
||||
|
||||
assert.deepEqual(testGlobals.sessionActions, [{ actionId: 'openControllerDebug', payload: undefined }]);
|
||||
assert.equal(openControllerDebugCount(), 1);
|
||||
assert.deepEqual(testGlobals.sessionActions, []);
|
||||
assert.deepEqual(testGlobals.openedModalNotifications, ['controller-debug']);
|
||||
} finally {
|
||||
testGlobals.restore();
|
||||
}
|
||||
@@ -822,6 +990,29 @@ test('keyboard mode: configured stats toggle works even while popup is open', as
|
||||
}
|
||||
});
|
||||
|
||||
test('refreshConfiguredShortcuts updates refreshed stats and mark-watched keys', async () => {
|
||||
const { handlers, testGlobals } = createKeyboardHandlerHarness();
|
||||
|
||||
try {
|
||||
await handlers.setupMpvInputForwarding();
|
||||
|
||||
testGlobals.setStatsToggleKey('KeyG');
|
||||
testGlobals.setMarkWatchedKey('KeyM');
|
||||
await handlers.refreshConfiguredShortcuts();
|
||||
|
||||
const beforeMarkWatchedCalls = testGlobals.markActiveVideoWatchedCalls();
|
||||
|
||||
testGlobals.dispatchKeydown({ key: 'g', code: 'KeyG' });
|
||||
testGlobals.dispatchKeydown({ key: 'm', code: 'KeyM' });
|
||||
await wait(10);
|
||||
|
||||
assert.equal(testGlobals.statsToggleOverlayCalls(), 1);
|
||||
assert.equal(testGlobals.markActiveVideoWatchedCalls(), beforeMarkWatchedCalls + 1);
|
||||
} finally {
|
||||
testGlobals.restore();
|
||||
}
|
||||
});
|
||||
|
||||
test('youtube picker: unhandled keys still dispatch mpv keybindings', async () => {
|
||||
const { ctx, handlers, testGlobals } = createKeyboardHandlerHarness();
|
||||
|
||||
@@ -872,7 +1063,9 @@ test('session binding: Ctrl+Alt+S dispatches subsync action locally', async () =
|
||||
|
||||
testGlobals.dispatchKeydown({ key: 's', code: 'KeyS', ctrlKey: true, altKey: true });
|
||||
|
||||
assert.deepEqual(testGlobals.sessionActions, [{ actionId: 'triggerSubsync', payload: undefined }]);
|
||||
assert.deepEqual(testGlobals.sessionActions, [
|
||||
{ actionId: 'triggerSubsync', payload: undefined },
|
||||
]);
|
||||
} finally {
|
||||
testGlobals.restore();
|
||||
}
|
||||
|
||||
@@ -25,6 +25,8 @@ export function createKeyboardHandlers(
|
||||
fallbackUsed: boolean;
|
||||
fallbackUnavailable: boolean;
|
||||
}) => void;
|
||||
openControllerSelectModal?: () => void;
|
||||
openControllerDebugModal?: () => void;
|
||||
appendClipboardVideoToQueue: () => void;
|
||||
getPlaybackPaused: () => Promise<boolean | null>;
|
||||
toggleSubtitleSidebarModal?: () => void;
|
||||
@@ -37,12 +39,10 @@ export function createKeyboardHandlers(
|
||||
let pendingLookupRefreshAfterSubtitleSeek = false;
|
||||
let resetSelectionToStartOnNextSubtitleSync = false;
|
||||
let lookupScanFallbackTimer: ReturnType<typeof setTimeout> | null = null;
|
||||
let pendingNumericSelection:
|
||||
| {
|
||||
actionId: 'copySubtitleMultiple' | 'mineSentenceMultiple';
|
||||
timeout: ReturnType<typeof setTimeout> | null;
|
||||
}
|
||||
| null = null;
|
||||
let pendingNumericSelection: {
|
||||
actionId: 'copySubtitleMultiple' | 'mineSentenceMultiple';
|
||||
timeout: ReturnType<typeof setTimeout> | null;
|
||||
} | null = null;
|
||||
|
||||
const CHORD_MAP = new Map<
|
||||
string,
|
||||
@@ -78,12 +78,27 @@ export function createKeyboardHandlers(
|
||||
return parts.join('+');
|
||||
}
|
||||
|
||||
function updateConfiguredShortcuts(shortcuts: Required<ShortcutsConfig>): void {
|
||||
function updateConfiguredShortcuts(
|
||||
shortcuts: Required<ShortcutsConfig>,
|
||||
statsToggleKey?: string,
|
||||
markWatchedKey?: string,
|
||||
): void {
|
||||
ctx.state.sessionActionTimeoutMs = shortcuts.multiCopyTimeoutMs;
|
||||
if (typeof statsToggleKey === 'string') {
|
||||
ctx.state.statsToggleKey = statsToggleKey;
|
||||
}
|
||||
if (typeof markWatchedKey === 'string') {
|
||||
ctx.state.markWatchedKey = markWatchedKey;
|
||||
}
|
||||
}
|
||||
|
||||
async function refreshConfiguredShortcuts(): Promise<void> {
|
||||
updateConfiguredShortcuts(await window.electronAPI.getConfiguredShortcuts());
|
||||
const [shortcuts, statsToggleKey, markWatchedKey] = await Promise.all([
|
||||
window.electronAPI.getConfiguredShortcuts(),
|
||||
window.electronAPI.getStatsToggleKey(),
|
||||
window.electronAPI.getMarkWatchedKey(),
|
||||
]);
|
||||
updateConfiguredShortcuts(shortcuts, statsToggleKey, markWatchedKey);
|
||||
}
|
||||
|
||||
function updateSessionBindings(bindings: CompiledSessionBinding[]): void {
|
||||
@@ -160,8 +175,9 @@ export function createKeyboardHandlers(
|
||||
return true;
|
||||
}
|
||||
|
||||
if (!/^[1-9]$/.test(e.key) || e.ctrlKey || e.metaKey || e.altKey) {
|
||||
return false;
|
||||
if (!/^[1-9]$/.test(e.key) || e.ctrlKey || e.metaKey || e.altKey || e.shiftKey) {
|
||||
e.preventDefault();
|
||||
return true;
|
||||
}
|
||||
|
||||
e.preventDefault();
|
||||
@@ -181,6 +197,18 @@ export function createKeyboardHandlers(
|
||||
return;
|
||||
}
|
||||
|
||||
if (binding.actionType === 'session-action' && binding.actionId === 'openControllerSelect') {
|
||||
window.electronAPI.notifyOverlayModalOpened('controller-select');
|
||||
options.openControllerSelectModal?.();
|
||||
return;
|
||||
}
|
||||
|
||||
if (binding.actionType === 'session-action' && binding.actionId === 'openControllerDebug') {
|
||||
window.electronAPI.notifyOverlayModalOpened('controller-debug');
|
||||
options.openControllerDebugModal?.();
|
||||
return;
|
||||
}
|
||||
|
||||
if (binding.actionType === 'mpv-command') {
|
||||
dispatchConfiguredMpvCommand(binding.command);
|
||||
return;
|
||||
@@ -899,9 +927,7 @@ export function createKeyboardHandlers(
|
||||
window.electronAPI.getMarkWatchedKey(),
|
||||
]);
|
||||
updateSessionBindings(sessionBindings);
|
||||
updateConfiguredShortcuts(shortcuts);
|
||||
ctx.state.statsToggleKey = statsToggleKey;
|
||||
ctx.state.markWatchedKey = markWatchedKey;
|
||||
updateConfiguredShortcuts(shortcuts, statsToggleKey, markWatchedKey);
|
||||
syncKeyboardTokenSelection();
|
||||
|
||||
const subtitleMutationObserver = new MutationObserver(() => {
|
||||
@@ -1115,6 +1141,7 @@ export function createKeyboardHandlers(
|
||||
|
||||
return {
|
||||
beginSessionNumericSelection,
|
||||
getSessionHelpOpeningInfo: resolveSessionHelpChordBinding,
|
||||
setupMpvInputForwarding,
|
||||
refreshConfiguredShortcuts,
|
||||
updateSessionBindings,
|
||||
|
||||
@@ -73,11 +73,13 @@ export function createMouseHandlers(
|
||||
syncOverlayMouseIgnoreState(ctx);
|
||||
}
|
||||
|
||||
function reconcilePopupInteraction(args: {
|
||||
assumeVisible?: boolean;
|
||||
reclaimFocus?: boolean;
|
||||
allowPause?: boolean;
|
||||
} = {}): boolean {
|
||||
function reconcilePopupInteraction(
|
||||
args: {
|
||||
assumeVisible?: boolean;
|
||||
reclaimFocus?: boolean;
|
||||
allowPause?: boolean;
|
||||
} = {},
|
||||
): boolean {
|
||||
const popupVisible = syncPopupVisibilityState(args.assumeVisible === true);
|
||||
if (!popupVisible) {
|
||||
syncOverlayMouseIgnoreState(ctx);
|
||||
|
||||
@@ -168,48 +168,54 @@ function withRuntimeOptionsModal(
|
||||
test('openRuntimeOptionsModal shows loading shell before runtime options resolve', async () => {
|
||||
const deferred = createDeferred<RuntimeOptionState[]>();
|
||||
|
||||
await withRuntimeOptionsModal(() => deferred.promise, async (input) => {
|
||||
input.modal.openRuntimeOptionsModal();
|
||||
await withRuntimeOptionsModal(
|
||||
() => deferred.promise,
|
||||
async (input) => {
|
||||
input.modal.openRuntimeOptionsModal();
|
||||
|
||||
assert.equal(input.state.runtimeOptionsModalOpen, true);
|
||||
assert.equal(input.overlayClassList.contains('interactive'), true);
|
||||
assert.equal(input.modalClassList.contains('hidden'), false);
|
||||
assert.equal(input.statusNode.textContent, 'Loading runtime options...');
|
||||
assert.deepEqual(input.syncCalls, ['sync']);
|
||||
assert.equal(input.state.runtimeOptionsModalOpen, true);
|
||||
assert.equal(input.overlayClassList.contains('interactive'), true);
|
||||
assert.equal(input.modalClassList.contains('hidden'), false);
|
||||
assert.equal(input.statusNode.textContent, 'Loading runtime options...');
|
||||
assert.deepEqual(input.syncCalls, ['sync']);
|
||||
|
||||
deferred.resolve([
|
||||
{
|
||||
id: 'anki.autoUpdateNewCards',
|
||||
label: 'Auto-update new cards',
|
||||
scope: 'ankiConnect',
|
||||
valueType: 'boolean',
|
||||
value: true,
|
||||
allowedValues: [true, false],
|
||||
requiresRestart: false,
|
||||
},
|
||||
]);
|
||||
await flushAsyncWork();
|
||||
deferred.resolve([
|
||||
{
|
||||
id: 'anki.autoUpdateNewCards',
|
||||
label: 'Auto-update new cards',
|
||||
scope: 'ankiConnect',
|
||||
valueType: 'boolean',
|
||||
value: true,
|
||||
allowedValues: [true, false],
|
||||
requiresRestart: false,
|
||||
},
|
||||
]);
|
||||
await flushAsyncWork();
|
||||
|
||||
assert.equal(
|
||||
input.statusNode.textContent,
|
||||
'Use arrow keys. Click value to cycle. Enter or double-click to apply.',
|
||||
);
|
||||
assert.equal(input.statusNode.classList.contains('error'), false);
|
||||
});
|
||||
assert.equal(
|
||||
input.statusNode.textContent,
|
||||
'Use arrow keys. Click value to cycle. Enter or double-click to apply.',
|
||||
);
|
||||
assert.equal(input.statusNode.classList.contains('error'), false);
|
||||
},
|
||||
);
|
||||
});
|
||||
|
||||
test('openRuntimeOptionsModal keeps modal visible when loading fails', async () => {
|
||||
const deferred = createDeferred<RuntimeOptionState[]>();
|
||||
|
||||
await withRuntimeOptionsModal(() => deferred.promise, async (input) => {
|
||||
input.modal.openRuntimeOptionsModal();
|
||||
deferred.reject(new Error('boom'));
|
||||
await flushAsyncWork();
|
||||
await withRuntimeOptionsModal(
|
||||
() => deferred.promise,
|
||||
async (input) => {
|
||||
input.modal.openRuntimeOptionsModal();
|
||||
deferred.reject(new Error('boom'));
|
||||
await flushAsyncWork();
|
||||
|
||||
assert.equal(input.state.runtimeOptionsModalOpen, true);
|
||||
assert.equal(input.overlayClassList.contains('interactive'), true);
|
||||
assert.equal(input.modalClassList.contains('hidden'), false);
|
||||
assert.equal(input.statusNode.textContent, 'Failed to load runtime options');
|
||||
assert.equal(input.statusNode.classList.contains('error'), true);
|
||||
});
|
||||
assert.equal(input.state.runtimeOptionsModalOpen, true);
|
||||
assert.equal(input.overlayClassList.contains('interactive'), true);
|
||||
assert.equal(input.modalClassList.contains('hidden'), false);
|
||||
assert.equal(input.statusNode.textContent, 'Failed to load runtime options');
|
||||
assert.equal(input.statusNode.classList.contains('error'), true);
|
||||
},
|
||||
);
|
||||
});
|
||||
|
||||
@@ -96,6 +96,10 @@ const OVERLAY_SHORTCUTS: Array<{
|
||||
{ key: 'markAudioCard', label: 'Mark audio card' },
|
||||
{ key: 'openRuntimeOptions', label: 'Open runtime options' },
|
||||
{ key: 'openJimaku', label: 'Open jimaku' },
|
||||
{ key: 'openSessionHelp', label: 'Open session help' },
|
||||
{ key: 'openControllerSelect', label: 'Open controller select' },
|
||||
{ key: 'openControllerDebug', label: 'Open controller debug' },
|
||||
{ key: 'toggleSubtitleSidebar', label: 'Toggle subtitle sidebar' },
|
||||
{ key: 'toggleVisibleOverlayGlobal', label: 'Show/hide visible overlay' },
|
||||
];
|
||||
|
||||
@@ -104,11 +108,12 @@ function buildOverlayShortcutSections(shortcuts: RuntimeShortcutConfig): Session
|
||||
|
||||
for (const shortcut of OVERLAY_SHORTCUTS) {
|
||||
const keybind = shortcuts[shortcut.key];
|
||||
if (typeof keybind !== 'string') continue;
|
||||
if (keybind.trim().length === 0) continue;
|
||||
|
||||
rows.push({
|
||||
shortcut: formatKeybinding(keybind),
|
||||
shortcut:
|
||||
typeof keybind === 'string' && keybind.trim().length > 0
|
||||
? formatKeybinding(keybind)
|
||||
: 'Unbound',
|
||||
action: shortcut.label,
|
||||
});
|
||||
}
|
||||
@@ -591,13 +596,17 @@ export function createSessionHelpModal(
|
||||
priorFocus = document.activeElement;
|
||||
|
||||
ctx.state.sessionHelpModalOpen = true;
|
||||
helpSections = [];
|
||||
helpFilterValue = '';
|
||||
options.syncSettingsModalSubtitleSuppression();
|
||||
ctx.dom.overlay.classList.add('interactive');
|
||||
ctx.dom.sessionHelpModal.classList.remove('hidden');
|
||||
ctx.dom.sessionHelpModal.setAttribute('aria-hidden', 'false');
|
||||
ctx.dom.sessionHelpModal.setAttribute('tabindex', '-1');
|
||||
ctx.dom.sessionHelpFilter.value = '';
|
||||
helpFilterValue = '';
|
||||
ctx.state.sessionHelpSelectedIndex = 0;
|
||||
ctx.dom.sessionHelpContent.innerHTML = '';
|
||||
ctx.dom.sessionHelpContent.classList.remove('session-help-content-no-results');
|
||||
if (ctx.platform.shouldToggleMouseIgnore) {
|
||||
window.electronAPI.setIgnoreMouseEvents(false);
|
||||
}
|
||||
|
||||
@@ -130,7 +130,8 @@ test('visible yomitan popup host keeps overlay interactive even when cached popu
|
||||
},
|
||||
document: {
|
||||
querySelectorAll: (selector: string) =>
|
||||
selector === '[data-subminer-yomitan-popup-host="true"][data-subminer-yomitan-popup-visible="true"]'
|
||||
selector ===
|
||||
'[data-subminer-yomitan-popup-host="true"][data-subminer-yomitan-popup-visible="true"]'
|
||||
? [{ getAttribute: () => 'true' }]
|
||||
: [],
|
||||
},
|
||||
|
||||
@@ -174,6 +174,12 @@ const keyboardHandlers = createKeyboardHandlers(ctx, {
|
||||
handleControllerDebugKeydown: controllerDebugModal.handleControllerDebugKeydown,
|
||||
handleSessionHelpKeydown: sessionHelpModal.handleSessionHelpKeydown,
|
||||
openSessionHelpModal: sessionHelpModal.openSessionHelpModal,
|
||||
openControllerSelectModal: () => {
|
||||
controllerSelectModal.openControllerSelectModal();
|
||||
},
|
||||
openControllerDebugModal: () => {
|
||||
controllerDebugModal.openControllerDebugModal();
|
||||
},
|
||||
appendClipboardVideoToQueue: () => {
|
||||
void window.electronAPI.appendClipboardVideoToQueue();
|
||||
},
|
||||
@@ -431,11 +437,7 @@ function registerModalOpenHandlers(): void {
|
||||
});
|
||||
window.electronAPI.onOpenSessionHelp(() => {
|
||||
runGuarded('session-help:open', () => {
|
||||
sessionHelpModal.openSessionHelpModal({
|
||||
bindingKey: 'KeyH',
|
||||
fallbackUsed: false,
|
||||
fallbackUnavailable: false,
|
||||
});
|
||||
sessionHelpModal.openSessionHelpModal(keyboardHandlers.getSessionHelpOpeningInfo());
|
||||
window.electronAPI.notifyOverlayModalOpened('session-help');
|
||||
});
|
||||
});
|
||||
@@ -508,8 +510,8 @@ function registerKeyboardCommandHandlers(): void {
|
||||
});
|
||||
|
||||
window.electronAPI.onSubtitleSidebarToggle(() => {
|
||||
runGuarded('subtitle-sidebar:toggle', () => {
|
||||
void subtitleSidebarModal.toggleSubtitleSidebarModal();
|
||||
runGuardedAsync('subtitle-sidebar:toggle', async () => {
|
||||
await subtitleSidebarModal.toggleSubtitleSidebarModal();
|
||||
});
|
||||
});
|
||||
}
|
||||
|
||||
@@ -73,7 +73,10 @@ function queryPopupElements<T extends Element>(
|
||||
}
|
||||
|
||||
export function isYomitanPopupVisible(root: ParentNode | null | undefined = document): boolean {
|
||||
const visiblePopupHosts = queryPopupElements<HTMLElement>(root, YOMITAN_POPUP_VISIBLE_HOST_SELECTOR);
|
||||
const visiblePopupHosts = queryPopupElements<HTMLElement>(
|
||||
root,
|
||||
YOMITAN_POPUP_VISIBLE_HOST_SELECTOR,
|
||||
);
|
||||
if (visiblePopupHosts.length > 0) {
|
||||
return true;
|
||||
}
|
||||
|
||||
@@ -28,7 +28,11 @@ const SESSION_ACTION_IDS: SessionActionId[] = [
|
||||
'mineSentenceMultiple',
|
||||
'toggleSecondarySub',
|
||||
'markAudioCard',
|
||||
'toggleSubtitleSidebar',
|
||||
'openRuntimeOptions',
|
||||
'openSessionHelp',
|
||||
'openControllerSelect',
|
||||
'openControllerDebug',
|
||||
'openJimaku',
|
||||
'openYoutubePicker',
|
||||
'openPlaylistBrowser',
|
||||
@@ -252,7 +256,9 @@ export function parseSessionActionDispatchRequest(
|
||||
|
||||
const payload = parseSessionActionPayload(value.actionId, value.payload);
|
||||
if (payload === null) return null;
|
||||
return payload === undefined ? { actionId: value.actionId } : { actionId: value.actionId, payload };
|
||||
return payload === undefined
|
||||
? { actionId: value.actionId }
|
||||
: { actionId: value.actionId, payload };
|
||||
}
|
||||
|
||||
export function parseMpvCommand(value: unknown): Array<string | number> | null {
|
||||
|
||||
@@ -9,6 +9,7 @@ import type {
|
||||
CompiledSessionBinding,
|
||||
SessionActionId,
|
||||
SessionActionPayload,
|
||||
SessionBindingWarning,
|
||||
} from './session-bindings';
|
||||
import type {
|
||||
JimakuApiResponse,
|
||||
@@ -327,6 +328,7 @@ export interface ClipboardAppendResult {
|
||||
export interface ConfigHotReloadPayload {
|
||||
keybindings: Keybinding[];
|
||||
sessionBindings: CompiledSessionBinding[];
|
||||
sessionBindingWarnings: SessionBindingWarning[];
|
||||
subtitleStyle: SubtitleStyleConfig | null;
|
||||
subtitleSidebar: Required<SubtitleSidebarConfig>;
|
||||
secondarySubMode: SecondarySubMode;
|
||||
@@ -362,7 +364,10 @@ export interface ElectronAPI {
|
||||
getKeybindings: () => Promise<Keybinding[]>;
|
||||
getSessionBindings: () => Promise<CompiledSessionBinding[]>;
|
||||
getConfiguredShortcuts: () => Promise<Required<ShortcutsConfig>>;
|
||||
dispatchSessionAction: (actionId: SessionActionId, payload?: SessionActionPayload) => Promise<void>;
|
||||
dispatchSessionAction: (
|
||||
actionId: SessionActionId,
|
||||
payload?: SessionActionPayload,
|
||||
) => Promise<void>;
|
||||
getStatsToggleKey: () => Promise<string>;
|
||||
getMarkWatchedKey: () => Promise<string>;
|
||||
markActiveVideoWatched: () => Promise<boolean>;
|
||||
|
||||
@@ -62,9 +62,7 @@ export interface CompiledSessionActionBinding extends CompiledSessionBindingBase
|
||||
payload?: SessionActionPayload;
|
||||
}
|
||||
|
||||
export type CompiledSessionBinding =
|
||||
| CompiledMpvCommandBinding
|
||||
| CompiledSessionActionBinding;
|
||||
export type CompiledSessionBinding = CompiledMpvCommandBinding | CompiledSessionActionBinding;
|
||||
|
||||
export interface PluginSessionBindingsArtifact {
|
||||
version: 1;
|
||||
|
||||
@@ -79,11 +79,11 @@ export abstract class BaseWindowTracker {
|
||||
this.updateTargetWindowFocused(focused);
|
||||
}
|
||||
|
||||
protected updateGeometry(newGeometry: WindowGeometry | null): void {
|
||||
protected updateGeometry(newGeometry: WindowGeometry | null, initialFocused = true): void {
|
||||
if (newGeometry) {
|
||||
if (!this.windowFound) {
|
||||
this.windowFound = true;
|
||||
this.updateTargetWindowFocused(true);
|
||||
this.updateTargetWindowFocused(initialFocused);
|
||||
if (this.onWindowFound) this.onWindowFound(newGeometry);
|
||||
}
|
||||
|
||||
|
||||
72
src/window-trackers/mpv-socket-match.test.ts
Normal file
72
src/window-trackers/mpv-socket-match.test.ts
Normal file
@@ -0,0 +1,72 @@
|
||||
import test from 'node:test';
|
||||
import assert from 'node:assert/strict';
|
||||
import {
|
||||
filterMpvPollResultBySocketPath,
|
||||
matchesMpvSocketPathInCommandLine,
|
||||
} from './mpv-socket-match';
|
||||
import type { MpvPollResult } from './win32';
|
||||
|
||||
function createPollResult(commandLines: Array<string | null>): MpvPollResult {
|
||||
return {
|
||||
matches: commandLines.map((commandLine, index) => ({
|
||||
hwnd: index + 1,
|
||||
bounds: { x: index * 10, y: 0, width: 1280, height: 720 },
|
||||
area: 1280 * 720,
|
||||
isForeground: index === 0,
|
||||
commandLine,
|
||||
})),
|
||||
focusState: true,
|
||||
windowState: 'visible',
|
||||
};
|
||||
}
|
||||
|
||||
test('matchesMpvSocketPathInCommandLine accepts equals and space-delimited socket flags', () => {
|
||||
assert.equal(
|
||||
matchesMpvSocketPathInCommandLine(
|
||||
'mpv.exe --input-ipc-server=\\\\.\\pipe\\subminer-a video.mkv',
|
||||
'\\\\.\\pipe\\subminer-a',
|
||||
),
|
||||
true,
|
||||
);
|
||||
assert.equal(
|
||||
matchesMpvSocketPathInCommandLine(
|
||||
'mpv.exe --input-ipc-server "\\\\.\\pipe\\subminer-b" video.mkv',
|
||||
'\\\\.\\pipe\\subminer-b',
|
||||
),
|
||||
true,
|
||||
);
|
||||
assert.equal(
|
||||
matchesMpvSocketPathInCommandLine(
|
||||
'mpv.exe --input-ipc-server=\\\\.\\pipe\\subminer-a video.mkv',
|
||||
'\\\\.\\pipe\\subminer-b',
|
||||
),
|
||||
false,
|
||||
);
|
||||
});
|
||||
|
||||
test('filterMpvPollResultBySocketPath keeps only matches for the requested socket path', () => {
|
||||
const result = filterMpvPollResultBySocketPath(
|
||||
createPollResult([
|
||||
'mpv.exe --input-ipc-server=\\\\.\\pipe\\subminer-a video-a.mkv',
|
||||
'mpv.exe --input-ipc-server=\\\\.\\pipe\\subminer-b video-b.mkv',
|
||||
null,
|
||||
]),
|
||||
'\\\\.\\pipe\\subminer-b',
|
||||
);
|
||||
|
||||
assert.deepEqual(
|
||||
result.matches.map((match) => match.hwnd),
|
||||
[2],
|
||||
);
|
||||
assert.equal(result.windowState, 'visible');
|
||||
});
|
||||
|
||||
test('matchesMpvSocketPathInCommandLine rejects socket-path prefix matches', () => {
|
||||
assert.equal(
|
||||
matchesMpvSocketPathInCommandLine(
|
||||
'mpv.exe --input-ipc-server=\\\\.\\pipe\\subminer-10 video.mkv',
|
||||
'\\\\.\\pipe\\subminer-1',
|
||||
),
|
||||
false,
|
||||
);
|
||||
});
|
||||
41
src/window-trackers/mpv-socket-match.ts
Normal file
41
src/window-trackers/mpv-socket-match.ts
Normal file
@@ -0,0 +1,41 @@
|
||||
import type { MpvPollResult } from './win32';
|
||||
|
||||
function escapeRegex(text: string): string {
|
||||
return text.replace(/[.*+?^${}()|[\]\\]/g, '\\$&');
|
||||
}
|
||||
|
||||
export function matchesMpvSocketPathInCommandLine(
|
||||
commandLine: string,
|
||||
targetSocketPath: string,
|
||||
): boolean {
|
||||
if (!commandLine || !targetSocketPath) {
|
||||
return false;
|
||||
}
|
||||
|
||||
const escapedSocketPath = escapeRegex(targetSocketPath);
|
||||
return new RegExp(
|
||||
`(?:^|\\s)--input-ipc-server(?:=|\\s+)(?:"${escapedSocketPath}"|${escapedSocketPath})(?=\\s|$)`,
|
||||
'i',
|
||||
).test(commandLine);
|
||||
}
|
||||
|
||||
export function filterMpvPollResultBySocketPath(
|
||||
result: MpvPollResult,
|
||||
targetSocketPath?: string | null,
|
||||
): MpvPollResult {
|
||||
if (!targetSocketPath) {
|
||||
return result;
|
||||
}
|
||||
|
||||
const matches = result.matches.filter(
|
||||
(match) =>
|
||||
typeof match.commandLine === 'string' &&
|
||||
matchesMpvSocketPathInCommandLine(match.commandLine, targetSocketPath),
|
||||
);
|
||||
|
||||
return {
|
||||
matches,
|
||||
focusState: matches.some((match) => match.isForeground),
|
||||
windowState: matches.length > 0 ? 'visible' : 'not-found',
|
||||
};
|
||||
}
|
||||
@@ -1,4 +1,6 @@
|
||||
import { execFileSync } from 'node:child_process';
|
||||
import koffi from 'koffi';
|
||||
import { matchesMpvSocketPathInCommandLine } from './mpv-socket-match';
|
||||
|
||||
const user32 = koffi.load('user32.dll');
|
||||
const dwmapi = koffi.load('dwmapi.dll');
|
||||
@@ -126,6 +128,7 @@ export interface MpvWindowMatch {
|
||||
bounds: WindowBounds;
|
||||
area: number;
|
||||
isForeground: boolean;
|
||||
commandLine?: string | null;
|
||||
}
|
||||
|
||||
export interface MpvPollResult {
|
||||
@@ -170,12 +173,52 @@ function getProcessNameByPid(pid: number): string | null {
|
||||
}
|
||||
}
|
||||
|
||||
export function findMpvWindows(): MpvPollResult {
|
||||
const processCommandLineCache = new Map<number, string>();
|
||||
|
||||
function getProcessCommandLineByPid(pid: number): string | null {
|
||||
if (processCommandLineCache.has(pid)) {
|
||||
return processCommandLineCache.get(pid) ?? null;
|
||||
}
|
||||
|
||||
let commandLine: string | null = null;
|
||||
try {
|
||||
const output = execFileSync(
|
||||
'powershell.exe',
|
||||
[
|
||||
'-NoProfile',
|
||||
'-NonInteractive',
|
||||
'-ExecutionPolicy',
|
||||
'Bypass',
|
||||
'-Command',
|
||||
`$process = Get-CimInstance Win32_Process -Filter "ProcessId = ${pid}"; if ($process -and $process.CommandLine) { [Console]::Out.Write($process.CommandLine) }`,
|
||||
],
|
||||
{
|
||||
encoding: 'utf8',
|
||||
windowsHide: true,
|
||||
stdio: ['ignore', 'pipe', 'ignore'],
|
||||
timeout: 1500,
|
||||
},
|
||||
).trim();
|
||||
commandLine = output.length > 0 ? output : null;
|
||||
} catch {
|
||||
commandLine = null;
|
||||
}
|
||||
|
||||
if (commandLine !== null) {
|
||||
processCommandLineCache.set(pid, commandLine);
|
||||
} else {
|
||||
processCommandLineCache.delete(pid);
|
||||
}
|
||||
return commandLine;
|
||||
}
|
||||
|
||||
export function findMpvWindows(targetSocketPath?: string | null): MpvPollResult {
|
||||
const foregroundHwnd = GetForegroundWindow();
|
||||
const matches: MpvWindowMatch[] = [];
|
||||
let hasMinimized = false;
|
||||
let hasFocused = false;
|
||||
const processNameCache = new Map<number, string | null>();
|
||||
const processCommandLineLookupCache = new Map<number, string | null>();
|
||||
|
||||
const cb = koffi.register((hwnd: number, _lParam: number) => {
|
||||
if (!IsWindowVisible(hwnd)) return true;
|
||||
@@ -193,6 +236,18 @@ export function findMpvWindows(): MpvPollResult {
|
||||
|
||||
if (!processName || processName.toLowerCase() !== 'mpv') return true;
|
||||
|
||||
let commandLine: string | null = null;
|
||||
if (targetSocketPath) {
|
||||
commandLine = processCommandLineLookupCache.get(pidValue) ?? null;
|
||||
if (!processCommandLineLookupCache.has(pidValue)) {
|
||||
commandLine = getProcessCommandLineByPid(pidValue);
|
||||
processCommandLineLookupCache.set(pidValue, commandLine);
|
||||
}
|
||||
if (!commandLine || !matchesMpvSocketPathInCommandLine(commandLine, targetSocketPath)) {
|
||||
return true;
|
||||
}
|
||||
}
|
||||
|
||||
if (IsIconic(hwnd)) {
|
||||
hasMinimized = true;
|
||||
return true;
|
||||
@@ -209,6 +264,7 @@ export function findMpvWindows(): MpvPollResult {
|
||||
bounds,
|
||||
area: bounds.width * bounds.height,
|
||||
isForeground,
|
||||
commandLine,
|
||||
});
|
||||
|
||||
return true;
|
||||
@@ -290,10 +346,18 @@ export function bindOverlayAboveMpv(overlayHwnd: number, mpvHwnd: number): void
|
||||
|
||||
let insertAfter = HWND_TOP;
|
||||
if (windowAboveMpv !== 0) {
|
||||
const aboveExStyle = GetWindowLongW(windowAboveMpv, GWL_EXSTYLE);
|
||||
const aboveIsTopmost = (aboveExStyle & WS_EX_TOPMOST) !== 0;
|
||||
if (aboveIsTopmost === mpvIsTopmost) {
|
||||
insertAfter = windowAboveMpv;
|
||||
try {
|
||||
resetLastError();
|
||||
const aboveExStyle = assertGetWindowLongSucceeded(
|
||||
'bindOverlayAboveMpv window above style',
|
||||
GetWindowLongW(windowAboveMpv, GWL_EXSTYLE),
|
||||
);
|
||||
const aboveIsTopmost = (aboveExStyle & WS_EX_TOPMOST) !== 0;
|
||||
if (aboveIsTopmost === mpvIsTopmost) {
|
||||
insertAfter = windowAboveMpv;
|
||||
}
|
||||
} catch {
|
||||
insertAfter = HWND_TOP;
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@@ -26,7 +26,8 @@ export function findWindowsMpvTargetWindowHandle(result?: MpvPollResult): number
|
||||
const poll = result ?? loadWin32().findMpvWindows();
|
||||
const focused = poll.matches.find((match) => match.isForeground);
|
||||
const best =
|
||||
focused ?? [...poll.matches].sort((a, b) => b.area - a.area || b.bounds.width - a.bounds.width)[0];
|
||||
focused ??
|
||||
[...poll.matches].sort((a, b) => b.area - a.area || b.bounds.width - a.bounds.width)[0];
|
||||
return best?.hwnd ?? null;
|
||||
}
|
||||
|
||||
|
||||
@@ -4,7 +4,9 @@ import { WindowsWindowTracker } from './windows-tracker';
|
||||
import type { MpvPollResult } from './win32';
|
||||
|
||||
function mpvVisible(
|
||||
overrides: Partial<MpvPollResult & { x?: number; y?: number; width?: number; height?: number; focused?: boolean }> = {},
|
||||
overrides: Partial<
|
||||
MpvPollResult & { x?: number; y?: number; width?: number; height?: number; focused?: boolean }
|
||||
> = {},
|
||||
): MpvPollResult {
|
||||
return {
|
||||
matches: [
|
||||
@@ -70,6 +72,22 @@ test('WindowsWindowTracker updates geometry from poll output', () => {
|
||||
assert.equal(tracker.isTargetWindowFocused(), true);
|
||||
});
|
||||
|
||||
test('WindowsWindowTracker preserves an unfocused initial match', () => {
|
||||
const tracker = new WindowsWindowTracker(undefined, {
|
||||
pollMpvWindows: () => mpvVisible({ x: 10, y: 20, width: 1280, height: 720, focused: false }),
|
||||
});
|
||||
|
||||
(tracker as unknown as { pollGeometry: () => void }).pollGeometry();
|
||||
|
||||
assert.deepEqual(tracker.getGeometry(), {
|
||||
x: 10,
|
||||
y: 20,
|
||||
width: 1280,
|
||||
height: 720,
|
||||
});
|
||||
assert.equal(tracker.isTargetWindowFocused(), false);
|
||||
});
|
||||
|
||||
test('WindowsWindowTracker clears geometry for poll misses', () => {
|
||||
const tracker = new WindowsWindowTracker(undefined, {
|
||||
pollMpvWindows: () => mpvNotFound,
|
||||
|
||||
@@ -32,9 +32,8 @@ type WindowsTrackerDeps = {
|
||||
};
|
||||
|
||||
function defaultPollMpvWindows(_targetMpvSocketPath?: string | null): MpvPollResult {
|
||||
void _targetMpvSocketPath;
|
||||
const win32 = require('./win32') as typeof import('./win32');
|
||||
return win32.findMpvWindows();
|
||||
return win32.findMpvWindows(_targetMpvSocketPath);
|
||||
}
|
||||
|
||||
export class WindowsWindowTracker extends BaseWindowTracker {
|
||||
@@ -51,11 +50,13 @@ export class WindowsWindowTracker extends BaseWindowTracker {
|
||||
private trackingLossStartedAtMs: number | null = null;
|
||||
private targetWindowMinimized = false;
|
||||
private readonly targetMpvSocketPath: string | null;
|
||||
private currentTargetWindowHwnd: number | null = null;
|
||||
|
||||
constructor(_targetMpvSocketPath?: string, deps: WindowsTrackerDeps = {}) {
|
||||
super();
|
||||
this.targetMpvSocketPath = _targetMpvSocketPath?.trim() || null;
|
||||
this.pollMpvWindows = deps.pollMpvWindows ?? (() => defaultPollMpvWindows(this.targetMpvSocketPath));
|
||||
this.pollMpvWindows =
|
||||
deps.pollMpvWindows ?? (() => defaultPollMpvWindows(this.targetMpvSocketPath));
|
||||
this.maxConsecutiveMisses = Math.max(1, Math.floor(deps.maxConsecutiveMisses ?? 2));
|
||||
this.trackingLossGraceMs = Math.max(0, Math.floor(deps.trackingLossGraceMs ?? 1_500));
|
||||
this.minimizedTrackingLossGraceMs = Math.max(
|
||||
@@ -81,6 +82,10 @@ export class WindowsWindowTracker extends BaseWindowTracker {
|
||||
return this.targetWindowMinimized;
|
||||
}
|
||||
|
||||
getTargetWindowHandle(): number | null {
|
||||
return this.currentTargetWindowHwnd;
|
||||
}
|
||||
|
||||
private maybeLogPollError(error: Error): void {
|
||||
const now = Date.now();
|
||||
const fingerprint = error.message;
|
||||
@@ -122,7 +127,7 @@ export class WindowsWindowTracker extends BaseWindowTracker {
|
||||
|
||||
private selectBestMatch(
|
||||
result: MpvPollResult,
|
||||
): { geometry: WindowGeometry; focused: boolean } | null {
|
||||
): { geometry: WindowGeometry; focused: boolean; hwnd: number } | null {
|
||||
if (result.matches.length === 0) return null;
|
||||
|
||||
const focusedMatch = result.matches.find((m) => m.isForeground);
|
||||
@@ -133,6 +138,7 @@ export class WindowsWindowTracker extends BaseWindowTracker {
|
||||
return {
|
||||
geometry: best.bounds,
|
||||
focused: best.isForeground,
|
||||
hwnd: best.hwnd,
|
||||
};
|
||||
}
|
||||
|
||||
@@ -147,25 +153,29 @@ export class WindowsWindowTracker extends BaseWindowTracker {
|
||||
if (best) {
|
||||
this.resetTrackingLossState();
|
||||
this.targetWindowMinimized = false;
|
||||
this.currentTargetWindowHwnd = best.hwnd;
|
||||
this.updateGeometry(best.geometry, best.focused);
|
||||
this.updateTargetWindowFocused(best.focused);
|
||||
this.updateGeometry(best.geometry);
|
||||
return;
|
||||
}
|
||||
|
||||
if (result.windowState === 'minimized') {
|
||||
this.targetWindowMinimized = true;
|
||||
this.currentTargetWindowHwnd = null;
|
||||
this.updateTargetWindowFocused(false);
|
||||
this.registerTrackingMiss(this.minimizedTrackingLossGraceMs);
|
||||
return;
|
||||
}
|
||||
|
||||
this.targetWindowMinimized = false;
|
||||
this.currentTargetWindowHwnd = null;
|
||||
this.updateTargetWindowFocused(false);
|
||||
this.registerTrackingMiss();
|
||||
} catch (error: unknown) {
|
||||
const err = error instanceof Error ? error : new Error(String(error));
|
||||
this.maybeLogPollError(err);
|
||||
this.targetWindowMinimized = false;
|
||||
this.currentTargetWindowHwnd = null;
|
||||
this.updateTargetWindowFocused(false);
|
||||
this.registerTrackingMiss();
|
||||
} finally {
|
||||
|
||||
@@ -93,7 +93,7 @@ export function AnimeTab({
|
||||
<div className="flex items-center gap-3">
|
||||
<input
|
||||
type="text"
|
||||
placeholder="Search anime..."
|
||||
placeholder="Search library..."
|
||||
value={search}
|
||||
onChange={(e) => setSearch(e.target.value)}
|
||||
className="flex-1 bg-ctp-surface0 border border-ctp-surface1 rounded-lg px-3 py-2 text-sm text-ctp-text placeholder:text-ctp-overlay2 focus:outline-none focus:border-ctp-blue"
|
||||
@@ -125,12 +125,12 @@ export function AnimeTab({
|
||||
))}
|
||||
</div>
|
||||
<div className="text-xs text-ctp-overlay2 shrink-0">
|
||||
{filtered.length} anime · {formatDuration(totalMs)}
|
||||
{filtered.length} titles · {formatDuration(totalMs)}
|
||||
</div>
|
||||
</div>
|
||||
|
||||
{filtered.length === 0 ? (
|
||||
<div className="text-sm text-ctp-overlay2 p-4">No anime found</div>
|
||||
<div className="text-sm text-ctp-overlay2 p-4">No titles found</div>
|
||||
) : (
|
||||
<div className={`grid ${GRID_CLASSES[cardSize]} gap-4`}>
|
||||
{filtered.map((item) => (
|
||||
|
||||
60
stats/src/components/anime/EpisodeDetail.test.tsx
Normal file
60
stats/src/components/anime/EpisodeDetail.test.tsx
Normal file
@@ -0,0 +1,60 @@
|
||||
import assert from 'node:assert/strict';
|
||||
import test from 'node:test';
|
||||
import { filterCardEvents } from './EpisodeDetail';
|
||||
import type { EpisodeCardEvent } from '../../types/stats';
|
||||
|
||||
function makeEvent(over: Partial<EpisodeCardEvent> & { eventId: number }): EpisodeCardEvent {
|
||||
return {
|
||||
sessionId: 1,
|
||||
tsMs: 0,
|
||||
cardsDelta: 1,
|
||||
noteIds: [],
|
||||
...over,
|
||||
};
|
||||
}
|
||||
|
||||
test('filterCardEvents: before load, returns all events unchanged', () => {
|
||||
const ev1 = makeEvent({ eventId: 1, noteIds: [101] });
|
||||
const ev2 = makeEvent({ eventId: 2, noteIds: [102] });
|
||||
const noteInfos = new Map(); // empty — simulates pre-load state
|
||||
const result = filterCardEvents([ev1, ev2], noteInfos, /* noteInfosLoaded */ false);
|
||||
assert.equal(result.length, 2, 'should return both events before load');
|
||||
assert.deepEqual(result[0]?.noteIds, [101]);
|
||||
assert.deepEqual(result[1]?.noteIds, [102]);
|
||||
});
|
||||
|
||||
test('filterCardEvents: after load, drops noteIds not in noteInfos', () => {
|
||||
const ev1 = makeEvent({ eventId: 1, noteIds: [101] }); // survives
|
||||
const ev2 = makeEvent({ eventId: 2, noteIds: [102] }); // deleted from Anki
|
||||
const noteInfos = new Map([[101, { noteId: 101, expression: '食べる' }]]);
|
||||
const result = filterCardEvents([ev1, ev2], noteInfos, /* noteInfosLoaded */ true);
|
||||
assert.equal(result.length, 1, 'should drop event whose noteId was deleted from Anki');
|
||||
assert.equal(result[0]?.eventId, 1);
|
||||
assert.deepEqual(result[0]?.noteIds, [101]);
|
||||
});
|
||||
|
||||
test('filterCardEvents: after load, legacy rollup events (empty noteIds, positive cardsDelta) are kept', () => {
|
||||
const rollup = makeEvent({ eventId: 3, noteIds: [], cardsDelta: 5 });
|
||||
const noteInfos = new Map<number, { noteId: number; expression: string }>();
|
||||
const result = filterCardEvents([rollup], noteInfos, true);
|
||||
assert.equal(result.length, 1, 'legacy rollup event should survive filtering');
|
||||
assert.equal(result[0]?.cardsDelta, 5);
|
||||
});
|
||||
|
||||
test('filterCardEvents: after load, event with multiple noteIds keeps surviving ones', () => {
|
||||
const ev = makeEvent({ eventId: 4, noteIds: [201, 202, 203] });
|
||||
const noteInfos = new Map([
|
||||
[201, { noteId: 201, expression: 'A' }],
|
||||
[203, { noteId: 203, expression: 'C' }],
|
||||
]);
|
||||
const result = filterCardEvents([ev], noteInfos, true);
|
||||
assert.equal(result.length, 1, 'event with surviving noteIds should be kept');
|
||||
assert.deepEqual(result[0]?.noteIds, [201, 203], 'only surviving noteIds should remain');
|
||||
});
|
||||
|
||||
test('filterCardEvents: after load, event where all noteIds deleted is dropped', () => {
|
||||
const ev = makeEvent({ eventId: 5, noteIds: [301, 302] });
|
||||
const noteInfos = new Map<number, { noteId: number; expression: string }>();
|
||||
const result = filterCardEvents([ev], noteInfos, true);
|
||||
assert.equal(result.length, 0, 'event with all noteIds deleted should be dropped');
|
||||
});
|
||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user