Compare commits

...

9 Commits

29 changed files with 740 additions and 232 deletions

1
.gitignore vendored
View File

@@ -9,6 +9,7 @@ out/
dist/
release/
build/yomitan/
coverage/
# Launcher build artifact (produced by make build-launcher)
/subminer

View File

@@ -20,7 +20,7 @@ Priority keys:
| ID | Pri | Status | Area | Title |
| ------ | --- | ------ | -------------- | --------------------------------------------------- |
| SM-013 | P1 | doing | review-followup | Address PR #36 CodeRabbit action items |
| SM-013 | P1 | done | review-followup | Address PR #36 CodeRabbit action items |
## Ready
@@ -241,7 +241,7 @@ Done:
Title: Address PR #36 CodeRabbit action items
Priority: P1
Status: doing
Status: done
Scope:
- `plugins/subminer-workflow/skills/subminer-change-verification/scripts/verify_subminer_change.sh`
@@ -251,7 +251,16 @@ Scope:
- `src/core/services/immersion-tracker/maintenance.ts`
- `src/main/boot/services.ts`
- `src/main/character-dictionary-runtime/zip.test.ts`
Acceptance:
Acceptance:
- fix valid open CodeRabbit findings on PR #36
- add focused regression coverage for behavior changes where practical
- verify touched tests plus typecheck stay green
Done:
- hardened `--artifact-dir` validation in the verification script
- fixed trend aggregation rounding and monthly ratio bucketing
- preserved unwatched anime episodes in episode queries
- restored seconds-based aggregate timestamps in shared maintenance
- fixed the startup refactor compile break by making the predicates local at the call site
- verified with `bun test src/core/services/immersion-tracker/__tests__/query.test.ts src/core/services/immersion-tracker/__tests__/query-split-modules.test.ts` and `bun run typecheck`

View File

@@ -1,5 +1,24 @@
# Changelog
## v0.10.0 (2026-03-29)
### Changed
- Integrations: Replaced the deprecated Discord Rich Presence wrapper with the maintained `@xhayper/discord-rpc` package.
### Fixed
- Stats: Fixed stats startup so the immersion tracker can run when `Bun.serve` is unavailable.
- Stats: Stats server now falls back to a Node `http` listener in Electron/runtime paths that do not expose Bun.
### Internal
- Release: Added a maintained source coverage lane that shards Bun coverage one test file at a time and merges LCOV output into `coverage/test-src/lcov.info`.
- Release: CI and release quality-gate now upload the merged source-lane LCOV artifact for inspection.
- Runtime: Extracted remaining inline runtime logic from `src/main.ts` into dedicated runtime modules and composer helpers.
- Runtime: Added focused regression tests for the extracted runtime/composer boundaries.
- Runtime: Updated task tracking notes to mark TASK-238.6 complete and confirm follow-on boot-phase split can be deferred.
- Runtime: Split `src/main.ts` boot wiring into dedicated `src/main/boot/services.ts`, `src/main/boot/runtimes.ts`, and `src/main/boot/handlers.ts` modules.
- Runtime: Added focused tests for the new boot-phase seams and kept the startup/typecheck/build verification lanes green.
- Runtime: Updated internal architecture/task docs to record the boot-phase split and new ownership boundary.
## v0.9.3 (2026-03-25)
### Changed

View File

@@ -0,0 +1,35 @@
---
id: TASK-243
title: 'Assess and address PR #36 latest CodeRabbit review round'
status: Done
assignee: []
created_date: '2026-03-29 07:39'
updated_date: '2026-03-29 07:41'
labels:
- code-review
- pr-36
dependencies: []
references:
- 'https://github.com/ksyasuda/SubMiner/pull/36'
priority: high
ordinal: 3600
---
## Description
<!-- SECTION:DESCRIPTION:BEGIN -->
Inspect the latest CodeRabbit review round on PR #36, verify each actionable comment against the current branch, implement the confirmed fixes, and verify the touched paths.
<!-- SECTION:DESCRIPTION:END -->
## Acceptance Criteria
<!-- AC:BEGIN -->
- [ ] #1 Confirmed review comments are implemented or explicitly deferred with rationale.
- [ ] #2 Touched paths are verified with the smallest sufficient test/build lane.
- [ ] #3 Current PR feedback is reduced to resolved or intentionally deferred suggestions.
<!-- AC:END -->
## Final Summary
<!-- SECTION:FINAL_SUMMARY:BEGIN -->
Addressed the confirmed latest CodeRabbit review items on PR #36. `scripts/run-coverage-lane.ts` now uses the Bun-style `import.meta.main` entrypoint check with a local ts-ignore to preserve the repo's CommonJS typecheck settings. `src/core/services/immersion-tracker/maintenance.ts` no longer shadows the imported `nowMs` helper in retention functions. `src/main.ts` now centralizes the startup-mode predicates behind a shared helper and releases `resolvedSource.cleanup` on the cached-subtitle fast path so materialized sources do not leak.
<!-- SECTION:FINAL_SUMMARY:END -->

View File

@@ -0,0 +1,35 @@
---
id: TASK-244
title: 'Assess and address PR #36 latest CodeRabbit review round 2'
status: Done
assignee: []
created_date: '2026-03-29 08:09'
updated_date: '2026-03-29 08:10'
labels:
- code-review
- pr-36
dependencies: []
references:
- 'https://github.com/ksyasuda/SubMiner/pull/36'
priority: high
ordinal: 3610
---
## Description
<!-- SECTION:DESCRIPTION:BEGIN -->
Inspect the newest CodeRabbit review round on PR #36, verify the actionable comment against the current branch, implement the confirmed fix, and verify the touched path.
<!-- SECTION:DESCRIPTION:END -->
## Acceptance Criteria
<!-- AC:BEGIN -->
- [ ] #1 The actionable review comment is implemented or explicitly deferred with rationale.
- [ ] #2 Touched path is verified with the smallest sufficient test lane.
- [ ] #3 Current PR feedback is reduced to resolved or intentionally deferred suggestions.
<!-- AC:END -->
## Final Summary
<!-- SECTION:FINAL_SUMMARY:BEGIN -->
Addressed the actionable latest CodeRabbit comment on PR #36. `src/core/services/immersion-tracker/maintenance.ts` now skips retention deletions when a window is disabled with `Infinity`, so `toDbMs(...)` is only called for finite retention values. Added a regression test in `maintenance.test.ts` that verifies disabled retention windows preserve session events, telemetry, and sessions while returning zero deletions.
<!-- SECTION:FINAL_SUMMARY:END -->

View File

@@ -0,0 +1,35 @@
---
id: TASK-242
title: Fix stats server Bun fallback in coverage lane
status: Done
assignee: []
created_date: '2026-03-29 07:31'
updated_date: '2026-03-29 07:37'
labels:
- ci
- bug
milestone: cleanup
dependencies: []
references:
- 'PR #36'
priority: high
---
## Description
<!-- SECTION:DESCRIPTION:BEGIN -->
Coverage CI fails when `startStatsServer` reaches the Bun server seam under the maintained source lane. Add a runtime fallback that works when `Bun.serve` is unavailable and keep the stats-server startup path testable.
<!-- SECTION:DESCRIPTION:END -->
## Acceptance Criteria
<!-- AC:BEGIN -->
- [x] #1 `bun run test:coverage:src` passes in GitHub CI
- [x] #2 `startStatsServer` uses `Bun.serve` when present and a Node server fallback otherwise
- [x] #3 Regression coverage exists for the fallback startup path
<!-- AC:END -->
## Final Summary
<!-- SECTION:FINAL_SUMMARY:BEGIN -->
Fixed the CI failure in the coverage lane by replacing the Bun-only stats server path with a Bun-or-node/http startup fallback and by normalizing setup window options so undefined BrowserWindow fields are omitted. Verified the exact coverage lane under Bun 1.3.5 and confirmed the GitHub Actions run for PR #36 completed successfully.
<!-- SECTION:FINAL_SUMMARY:END -->

View File

@@ -0,0 +1,68 @@
---
id: TASK-245
title: Cut minor release v0.10.0 for docs and release prep
status: Done
assignee:
- '@codex'
created_date: '2026-03-29 08:10'
updated_date: '2026-03-29 08:13'
labels:
- release
- docs
- minor
dependencies: []
references:
- /home/sudacode/projects/japanese/SubMiner/package.json
- /home/sudacode/projects/japanese/SubMiner/README.md
- /home/sudacode/projects/japanese/SubMiner/docs/RELEASING.md
- /home/sudacode/projects/japanese/SubMiner/docs/README.md
- /home/sudacode/projects/japanese/SubMiner/docs-site/changelog.md
- /home/sudacode/projects/japanese/SubMiner/CHANGELOG.md
- /home/sudacode/projects/japanese/SubMiner/release/release-notes.md
priority: high
ordinal: 54850
---
## Description
<!-- SECTION:DESCRIPTION:BEGIN -->
Prepare the next 0-ver minor release cut as `v0.10.0`, keeping release-facing docs, backlog, and changelog artifacts aligned, then run the release-prep verification gate.
<!-- SECTION:DESCRIPTION:END -->
## Acceptance Criteria
<!-- AC:BEGIN -->
- [x] #1 Repository version metadata is updated to `0.10.0`.
- [x] #2 Release-facing docs and public changelog surfaces are aligned for the `v0.10.0` cut.
- [x] #3 `CHANGELOG.md` and `release/release-notes.md` contain the committed `v0.10.0` section and any consumed fragments are removed.
- [x] #4 Release-prep verification passes for changelog, config example, typecheck, tests, and build.
<!-- AC:END -->
## Implementation Notes
<!-- SECTION:NOTES:BEGIN -->
Completed:
- Bumped `package.json` from `0.9.3` to `0.10.0`.
- Ran `bun run changelog:build --version 0.10.0 --date 2026-03-29`, which generated `CHANGELOG.md` and `release/release-notes.md` and removed the queued `changes/*.md` fragments.
- Updated `docs-site/changelog.md` with the public-facing `v0.10.0` summary.
Verification:
- `bun run changelog:lint`
- `bun run changelog:check --version 0.10.0`
- `bun run verify:config-example`
- `bun run typecheck`
- `bunx bun@1.3.5 run test:fast`
- `bunx bun@1.3.5 run test:env`
- `bunx bun@1.3.5 run build`
- `bunx bun@1.3.5 run docs:test`
- `bunx bun@1.3.5 run docs:build`
Notes:
- The local `bun` binary is `1.3.11`, which tripped Bun's nested `node:test` handling in `test:fast`; rerunning with the repo-pinned `bun@1.3.5` cleared the issue.
- No README content change was necessary for this cut.
<!-- SECTION:NOTES:END -->
## Final Summary
<!-- SECTION:FINAL_SUMMARY:BEGIN -->
Prepared the `v0.10.0` release cut locally. Bumped `package.json`, generated committed root changelog and release notes, updated the public docs changelog summary, and verified the release gate with the repo-pinned Bun `1.3.5` runtime. The release prep is green and ready for tagging/publishing when desired.
<!-- SECTION:FINAL_SUMMARY:END -->

View File

@@ -0,0 +1,55 @@
---
id: TASK-246
title: Migrate Discord Rich Presence to maintained RPC wrapper
status: Done
assignee: []
created_date: '2026-03-29 08:17'
updated_date: '2026-03-29 08:22'
labels:
- dependency
- discord
- presence
dependencies: []
priority: medium
---
## Description
<!-- SECTION:DESCRIPTION:BEGIN -->
Replace the deprecated Discord Rich Presence wrapper with a maintained JavaScript alternative while preserving the current IPC-based presence behavior in the Electron main process.
<!-- SECTION:DESCRIPTION:END -->
## Acceptance Criteria
<!-- AC:BEGIN -->
- [x] #1 The app no longer depends on `discord-rpc`
- [x] #2 Discord Rich Presence still logs in and publishes activity updates from the main process
- [x] #3 Existing Discord presence tests continue to pass or are updated to cover the new client API
- [x] #4 The change is documented in the release notes or changelog fragment
<!-- AC:END -->
## Implementation Notes
<!-- SECTION:NOTES:BEGIN -->
Completed:
- Swapped the app's Discord RPC dependency from `discord-rpc` to `@xhayper/discord-rpc`.
- Extracted the client adapter into `src/main/runtime/discord-rpc-client.ts` so the main process can keep using a small wrapper around the maintained library.
- Added `src/main/runtime/discord-rpc-client.test.ts` to verify the adapter forwards login/activity/clear/destroy calls through `client.user`.
- Documented the dependency swap in `CHANGELOG.md`, `release/release-notes.md`, and `docs-site/changelog.md`.
Verification:
- `bunx bun@1.3.5 test src/main/runtime/discord-rpc-client.test.ts src/core/services/discord-presence.test.ts`
- `bunx bun@1.3.5 run changelog:lint`
- `bunx bun@1.3.5 run changelog:check --version 0.10.0`
- `bunx bun@1.3.5 run docs:test`
- `bunx bun@1.3.5 run docs:build`
Notes:
- The existing release prep artifacts for v0.10.0 were kept intact and updated in place.
- No README change was needed for this dependency swap.
<!-- SECTION:NOTES:END -->
## Final Summary
<!-- SECTION:FINAL_SUMMARY:BEGIN -->
Replaced the deprecated `discord-rpc` dependency with the maintained `@xhayper/discord-rpc` wrapper while preserving the main-process rich presence flow. Added a focused runtime wrapper test, kept the existing Discord presence service tests green, and documented the dependency swap in the release notes and changelog.
<!-- SECTION:FINAL_SUMMARY:END -->

View File

@@ -7,9 +7,9 @@
"dependencies": {
"@fontsource-variable/geist": "^5.2.8",
"@fontsource-variable/geist-mono": "^5.2.7",
"@xhayper/discord-rpc": "^1.3.3",
"axios": "^1.13.5",
"commander": "^14.0.3",
"discord-rpc": "^4.0.1",
"hono": "^4.12.7",
"jsonc-parser": "^3.3.1",
"libsql": "^0.5.22",
@@ -37,6 +37,12 @@
"@develar/schema-utils": ["@develar/schema-utils@2.6.5", "", { "dependencies": { "ajv": "^6.12.0", "ajv-keywords": "^3.4.1" } }, "sha512-0cp4PsWQ/9avqTVMCtZ+GirikIA36ikvjtHweU4/j8yLtgObI0+JUPhYFScgwlteveGB1rt3Cm8UhN04XayDig=="],
"@discordjs/collection": ["@discordjs/collection@2.1.1", "", {}, "sha512-LiSusze9Tc7qF03sLCujF5iZp7K+vRNEDBZ86FT9aQAv3vxMLihUvKvpsCWiQ2DJq1tVckopKm1rxomgNUc9hg=="],
"@discordjs/rest": ["@discordjs/rest@2.6.1", "", { "dependencies": { "@discordjs/collection": "^2.1.1", "@discordjs/util": "^1.2.0", "@sapphire/async-queue": "^1.5.3", "@sapphire/snowflake": "^3.5.5", "@vladfrangu/async_event_emitter": "^2.4.6", "discord-api-types": "^0.38.40", "magic-bytes.js": "^1.13.0", "tslib": "^2.6.3", "undici": "6.24.1" } }, "sha512-wwQdgjeaoYFiaG+atbqx6aJDpqW7JHAo0HrQkBTbYzM3/PJ3GweQIpgElNcGZ26DCUOXMyawYd0YF7vtr+fZXg=="],
"@discordjs/util": ["@discordjs/util@1.2.0", "", { "dependencies": { "discord-api-types": "^0.38.33" } }, "sha512-3LKP7F2+atl9vJFhaBjn4nOaSWahZ/yWjOvA4e5pnXkt2qyXRCHLxoBQy81GFtLGCq7K9lPm9R517M1U+/90Qg=="],
"@electron/asar": ["@electron/asar@3.4.1", "", { "dependencies": { "commander": "^5.0.0", "glob": "^7.1.6", "minimatch": "^3.0.4" }, "bin": { "asar": "bin/asar.js" } }, "sha512-i4/rNPRS84t0vSRa2HorerGRXWyF4vThfHesw0dmcWHp+cspK743UanA0suA5Q5y8kzY2y6YKrvbIUn69BCAiA=="],
"@electron/fuses": ["@electron/fuses@1.8.0", "", { "dependencies": { "chalk": "^4.1.1", "fs-extra": "^9.0.1", "minimist": "^1.2.5" }, "bin": { "electron-fuses": "dist/bin.js" } }, "sha512-zx0EIq78WlY/lBb1uXlziZmDZI4ubcCXIMJ4uGjXzZW0nS19TjSPeXPAjzzTmKQlJUZm0SbmZhPKP7tuQ1SsEw=="],
@@ -143,6 +149,10 @@
"@pkgjs/parseargs": ["@pkgjs/parseargs@0.11.0", "", {}, "sha512-+1VkjdD0QBLPodGrJUeqarH8VAIvQODIbwh9XpP5Syisf7YoQgsJKPNFoqqLQlu+VQ/tVSshMR6loPMn8U+dPg=="],
"@sapphire/async-queue": ["@sapphire/async-queue@1.5.5", "", {}, "sha512-cvGzxbba6sav2zZkH8GPf2oGk9yYoD5qrNWdu9fRehifgnFZJMV+nuy2nON2roRO4yQQ+v7MK/Pktl/HgfsUXg=="],
"@sapphire/snowflake": ["@sapphire/snowflake@3.5.5", "", {}, "sha512-xzvBr1Q1c4lCe7i6sRnrofxeO1QTP/LKQ6A6qy0iB4x5yfiSfARMEQEghojzTNALDTcv8En04qYNIco9/K9eZQ=="],
"@sindresorhus/is": ["@sindresorhus/is@4.6.0", "", {}, "sha512-t09vSN3MdfsyCHoFcTRCH/iUtG7OJ0CsjzB8cjAmKc/va/kIgeDI/TxsigdncE/4be734m0cvIYwNaV4i2XqAw=="],
"@szmarczak/http-timer": ["@szmarczak/http-timer@4.0.6", "", { "dependencies": { "defer-to-connect": "^2.0.0" } }, "sha512-4BAffykYOgO+5nzBWYwE3W90sBgLJoUPRWWcL8wlyiM8IB8ipJz3UMJ9KXQd1RKQXpKp8Tutn80HZtWsu2u76w=="],
@@ -171,6 +181,10 @@
"@types/yauzl": ["@types/yauzl@2.10.3", "", { "dependencies": { "@types/node": "*" } }, "sha512-oJoftv0LSuaDZE3Le4DbKX+KS9G36NzOeSap90UIK0yMA/NhKJhqlSGtNDORNRaIbQfzjXDrQa0ytJ6mNRGz/Q=="],
"@vladfrangu/async_event_emitter": ["@vladfrangu/async_event_emitter@2.4.7", "", {}, "sha512-Xfe6rpCTxSxfbswi/W/Pz7zp1WWSNn4A0eW4mLkQUewCrXXtMj31lCg+iQyTkh/CkusZSq9eDflu7tjEDXUY6g=="],
"@xhayper/discord-rpc": ["@xhayper/discord-rpc@1.3.3", "", { "dependencies": { "@discordjs/rest": "^2.6.1", "@vladfrangu/async_event_emitter": "^2.4.7", "discord-api-types": "^0.38.42", "ws": "^8.20.0" } }, "sha512-Ih48GHiua7TtZgKO+f0uZPhCeQqb84fY2qUys/oMh8UbUfiUkUJLVCmd/v2AK0/pV33euh0aqSXo7+9LiPSwGw=="],
"@xmldom/xmldom": ["@xmldom/xmldom@0.8.11", "", {}, "sha512-cQzWCtO6C8TQiYl1ruKNn2U6Ao4o4WBBcbL61yJl84x+j5sOWWFU9X7DpND8XZG3daDppSsigMdfAIl2upQBRw=="],
"abbrev": ["abbrev@3.0.1", "", {}, "sha512-AO2ac6pjRB3SJmGJo+v5/aK6Omggp6fsLrs6wN9bd35ulu4cCwaAU9+7ZhXjeqHVkaHThLuzH0nZr0YpCDhygg=="],
@@ -209,8 +223,6 @@
"base64-js": ["base64-js@1.5.1", "", {}, "sha512-AKpaYlHn8t4SVbOHCy+b5+KKgvR4vrsD8vbvrbiQJps7fKDTkjkDry6ji0rUJjC0kzbNePLwzxq8iypo41qeWA=="],
"bindings": ["bindings@1.5.0", "", { "dependencies": { "file-uri-to-path": "1.0.0" } }, "sha512-p2q/t/mhvuOj/UeLlV6566GD/guowlr0hHxClI0W9m7MWYkL1F0hLo+0Aexs9HSPCtR1SXQ0TD3MMKrXZajbiQ=="],
"bl": ["bl@4.1.0", "", { "dependencies": { "buffer": "^5.5.0", "inherits": "^2.0.4", "readable-stream": "^3.4.0" } }, "sha512-1W07cM9gS6DcLperZfFSj+bWLtaPGSOHWhPiGzXmvVJbRLdG82sH/Kn8EtW1VqWVA54AKf2h5k5BbnIbwF3h6w=="],
"boolean": ["boolean@3.2.0", "", {}, "sha512-d0II/GO9uf9lfUHH2BQsjxzRJZBdsjgsBiW4BvhWk/3qoKwQFjIDVN19PfX8F2D/r9PCMTtLWjYVCFrpeYUzsw=="],
@@ -293,7 +305,7 @@
"dir-compare": ["dir-compare@4.2.0", "", { "dependencies": { "minimatch": "^3.0.5", "p-limit": "^3.1.0 " } }, "sha512-2xMCmOoMrdQIPHdsTawECdNPwlVFB9zGcz3kuhmBO6U3oU+UQjsue0i8ayLKpgBcm+hcXPMVSGUN9d+pvJ6+VQ=="],
"discord-rpc": ["discord-rpc@4.0.1", "", { "dependencies": { "node-fetch": "^2.6.1", "ws": "^7.3.1" }, "optionalDependencies": { "register-scheme": "github:devsnek/node-register-scheme" } }, "sha512-HOvHpbq5STRZJjQIBzwoKnQ0jHplbEWFWlPDwXXKm/bILh4nzjcg7mNqll0UY7RsjFoaXA7e/oYb/4lvpda2zA=="],
"discord-api-types": ["discord-api-types@0.38.43", "", {}, "sha512-sSoBf/nK6m7BGtw65mi+QBuvEWaHE8MMziFLqWL+gT6ME/BLg34dRSVKS3Husx40uU06bvxUc3/X+D9Y6/zAbw=="],
"dmg-builder": ["dmg-builder@26.8.2", "", { "dependencies": { "app-builder-lib": "26.8.2", "builder-util": "26.8.1", "fs-extra": "^10.1.0", "iconv-lite": "^0.6.2", "js-yaml": "^4.1.0" }, "optionalDependencies": { "dmg-license": "^1.0.11" } }, "sha512-DaWI+p4DOqiFVZFMovdGYammBOyJAiHHFWUTQ0Z7gNc0twfdIN0LvyJ+vFsgZEDR1fjgbpCj690IVtbYIsZObQ=="],
@@ -359,8 +371,6 @@
"fdir": ["fdir@6.5.0", "", { "peerDependencies": { "picomatch": "^3 || ^4" }, "optionalPeers": ["picomatch"] }, "sha512-tIbYtZbucOs0BRGqPJkshJUYdL+SDH7dVM8gjy+ERp3WAUjLEFJE+02kanyHtwjWOnwrKYBiwAmM0p4kLJAnXg=="],
"file-uri-to-path": ["file-uri-to-path@1.0.0", "", {}, "sha512-0Zt+s3L7Vf1biwWZ29aARiVYLx7iMGnEUl9x33fbB/j3jR81u/O2LbqK+Bm1CDSNDKVtJ/YjwY7TUd5SkeLQLw=="],
"filelist": ["filelist@1.0.6", "", { "dependencies": { "minimatch": "^5.0.1" } }, "sha512-5giy2PkLYY1cP39p17Ech+2xlpTRL9HLspOfEgm0L6CwBXBTgsK5ou0JtzYuepxkaQ/tvhCFIJ5uXo0OrM2DxA=="],
"follow-redirects": ["follow-redirects@1.15.11", "", {}, "sha512-deG2P0JfjrTxl50XGCDyfI97ZGVCxIpfKYmfyrQ54n5FO/0gfIES8C/Psl6kWVDolizcaaxZJnTS0QSMxvnsBQ=="],
@@ -477,6 +487,8 @@
"lru-cache": ["lru-cache@6.0.0", "", { "dependencies": { "yallist": "^4.0.0" } }, "sha512-Jo6dJ04CmSjuznwJSS3pUeWmd/H0ffTlkXXgwZi+eq1UCmqQwCh+eLsYOYCwY991i2Fah4h1BEMCx4qThGbsiA=="],
"magic-bytes.js": ["magic-bytes.js@1.13.0", "", {}, "sha512-afO2mnxW7GDTXMm5/AoN1WuOcdoKhtgXjIvHmobqTD1grNplhGdv3PFOyjCVmrnOZBIT/gD/koDKpYG+0mvHcg=="],
"make-fetch-happen": ["make-fetch-happen@14.0.3", "", { "dependencies": { "@npmcli/agent": "^3.0.0", "cacache": "^19.0.1", "http-cache-semantics": "^4.1.1", "minipass": "^7.0.2", "minipass-fetch": "^4.0.0", "minipass-flush": "^1.0.5", "minipass-pipeline": "^1.2.4", "negotiator": "^1.0.0", "proc-log": "^5.0.0", "promise-retry": "^2.0.1", "ssri": "^12.0.0" } }, "sha512-QMjGbFTP0blj97EeidG5hk/QhKQ3T4ICckQGLgz38QF7Vgbk6e6FTARN8KhKxyBbWn8R0HU+bnw8aSoFPD4qtQ=="],
"matcher": ["matcher@3.0.0", "", { "dependencies": { "escape-string-regexp": "^4.0.0" } }, "sha512-OkeDaAZ/bQCxeFAozM55PKcKU0yJMPGifLwV4Qgjitu+5MoAfSQN4lsLJeXZ1b8w0x+/Emda6MZgXS1jvsapng=="],
@@ -523,8 +535,6 @@
"node-api-version": ["node-api-version@0.2.1", "", { "dependencies": { "semver": "^7.3.5" } }, "sha512-2xP/IGGMmmSQpI1+O/k72jF/ykvZ89JeuKX3TLJAYPDVLUalrshrLHkeVcCCZqG/eEa635cr8IBYzgnDvM2O8Q=="],
"node-fetch": ["node-fetch@2.7.0", "", { "dependencies": { "whatwg-url": "^5.0.0" }, "peerDependencies": { "encoding": "^0.1.0" }, "optionalPeers": ["encoding"] }, "sha512-c4FRfUm/dbcWZ7U+1Wq0AwCyFL+3nt2bEw05wfxSz+DWpWsitgmSgYmy2dQdWyKC1694ELPqMs/YzUSNozLt8A=="],
"node-gyp": ["node-gyp@11.5.0", "", { "dependencies": { "env-paths": "^2.2.0", "exponential-backoff": "^3.1.1", "graceful-fs": "^4.2.6", "make-fetch-happen": "^14.0.3", "nopt": "^8.0.0", "proc-log": "^5.0.0", "semver": "^7.3.5", "tar": "^7.4.3", "tinyglobby": "^0.2.12", "which": "^5.0.0" }, "bin": { "node-gyp": "bin/node-gyp.js" } }, "sha512-ra7Kvlhxn5V9Slyus0ygMa2h+UqExPqUIkfk7Pc8QTLT956JLSy51uWFwHtIYy0vI8cB4BDhc/S03+880My/LQ=="],
"nopt": ["nopt@8.1.0", "", { "dependencies": { "abbrev": "^3.0.0" }, "bin": { "nopt": "bin/nopt.js" } }, "sha512-ieGu42u/Qsa4TFktmaKEwM6MQH0pOWnaB3htzh0JRtx84+Mebc0cbZYN5bC+6WTZ4+77xrL9Pn5m7CV6VIkV7A=="],
@@ -587,8 +597,6 @@
"readable-stream": ["readable-stream@3.6.2", "", { "dependencies": { "inherits": "^2.0.3", "string_decoder": "^1.1.1", "util-deprecate": "^1.0.1" } }, "sha512-9u/sniCrY3D5WdsERHzHE4G2YCXqoG5FTHUiCC4SIbr6XcLZBY05ya9EKjYek9O5xOAwjGq+1JdGBAS7Q9ScoA=="],
"register-scheme": ["register-scheme@github:devsnek/node-register-scheme#e7cc9a6", { "dependencies": { "bindings": "^1.3.0", "node-addon-api": "^1.3.0" } }, "devsnek-node-register-scheme-e7cc9a6", "sha512-VwUWN3aKIg/yn7T8axW20Y1+4wGALIQectBmkmwSJfLrCycpVepGP/+KHjXSL/Ga8N1SmewL49kESgIhW7HbWg=="],
"require-directory": ["require-directory@2.1.1", "", {}, "sha512-fGxEI7+wsG9xrvdjsrlmL22OMTTiHRwAMroiEeMgq8gzoLC/PQr7RsRDSTLUg/bZAZtF+TVIkHc6/4RIKrui+Q=="],
"resedit": ["resedit@1.7.2", "", { "dependencies": { "pe-library": "^0.4.1" } }, "sha512-vHjcY2MlAITJhC0eRD/Vv8Vlgmu9Sd3LX9zZvtGzU5ZImdTN3+d6e/4mnTyV8vEbyf1sgNIrWxhWlrys52OkEA=="],
@@ -673,14 +681,16 @@
"tmp-promise": ["tmp-promise@3.0.3", "", { "dependencies": { "tmp": "^0.2.0" } }, "sha512-RwM7MoPojPxsOBYnyd2hy0bxtIlVrihNs9pj5SUvY8Zz1sQcQG2tG1hSr8PDxfgEB8RNKDhqbIlroIarSNDNsQ=="],
"tr46": ["tr46@0.0.3", "", {}, "sha512-N3WMsuqV66lT30CrXNbEjx4GEwlow3v6rr4mCcv6prnfwhS01rkgyFdjPNBYd9br7LpXV1+Emh01fHnq2Gdgrw=="],
"truncate-utf8-bytes": ["truncate-utf8-bytes@1.0.2", "", { "dependencies": { "utf8-byte-length": "^1.0.1" } }, "sha512-95Pu1QXQvruGEhv62XCMO3Mm90GscOCClvrIUwCM0PYOXK3kaF3l3sIHxx71ThJfcbM2O5Au6SO3AWCSEfW4mQ=="],
"tslib": ["tslib@2.8.1", "", {}, "sha512-oJFu94HQb+KVduSUQL7wnpmqnfmLsOA/nAh6b6EH0wCEoK0/mPeXU6c3wKDV83MkOuHPRHtSXKKU99IBazS/2w=="],
"type-fest": ["type-fest@0.13.1", "", {}, "sha512-34R7HTnG0XIJcBSn5XhDd7nNFPRcXYRZrBB2O2jdKqYODldSzBAqzsWoZYYvduky73toYS/ESqxPvkDf/F0XMg=="],
"typescript": ["typescript@5.9.3", "", { "bin": { "tsc": "bin/tsc", "tsserver": "bin/tsserver" } }, "sha512-jl1vZzPDinLr9eUt3J/t7V6FgNEw9QjvBPdysz9KfQDD41fQrC2Y4vKQdiaUpFT4bXlb1RHhLpp8wtm6M5TgSw=="],
"undici": ["undici@6.24.1", "", {}, "sha512-sC+b0tB1whOCzbtlx20fx3WgCXwkW627p4EA9uM+/tNNPkSS+eSEld6pAs9nDv7WbY1UUljBMYPtu9BCOrCWKA=="],
"undici-types": ["undici-types@7.18.2", "", {}, "sha512-AsuCzffGHJybSaRrmr5eHr81mwJU3kjw6M+uprWvCXiNeN9SOGwQ3Jn8jb8m3Z6izVgknn1R0FTCEAP2QrLY/w=="],
"unique-filename": ["unique-filename@4.0.0", "", { "dependencies": { "unique-slug": "^5.0.0" } }, "sha512-XSnEewXmQ+veP7xX2dS5Q4yZAvO40cBN2MWkJ7D/6sW4Dg6wYBNwM1Vrnz1FhH5AdeLIlUXRI9e28z1YZi71NQ=="],
@@ -699,10 +709,6 @@
"wcwidth": ["wcwidth@1.0.1", "", { "dependencies": { "defaults": "^1.0.3" } }, "sha512-XHPEwS0q6TaxcvG85+8EYkbiCux2XtWG2mkc47Ng2A77BQu9+DqIOJldST4HgPkuea7dvKSj5VgX3P1d4rW8Tg=="],
"webidl-conversions": ["webidl-conversions@3.0.1", "", {}, "sha512-2JAn3z8AR6rjK8Sm8orRC0h/bcl/DqL7tRPdGZ4I1CjdF+EaMLmYxBHyXuKL849eucPFhvBoxMsflfOb8kxaeQ=="],
"whatwg-url": ["whatwg-url@5.0.0", "", { "dependencies": { "tr46": "~0.0.3", "webidl-conversions": "^3.0.0" } }, "sha512-saE57nupxk6v3HY35+jzBwYa0rKSy0XR8JSxZPwgLr7ys0IBzhGviA1/TUGJLmSVqs8pb9AnvICXEuOHLprYTw=="],
"which": ["which@5.0.0", "", { "dependencies": { "isexe": "^3.1.1" }, "bin": { "node-which": "bin/which.js" } }, "sha512-JEdGzHwwkrbWoGOlIHqQ5gtprKGOenpDHpxE9zVR1bWbOtYRyPPHMe9FaP6x61CmNaTThSkb0DAJte5jD+DmzQ=="],
"wrap-ansi": ["wrap-ansi@7.0.0", "", { "dependencies": { "ansi-styles": "^4.0.0", "string-width": "^4.1.0", "strip-ansi": "^6.0.0" } }, "sha512-YVGIj2kamLSTxw6NsZjoBxfSwsn0ycdesmc4p+Q21c5zPuZ1pl+NfxVdxPtdHvmNVOQ6XSYG4AUtyt/Fi7D16Q=="],
@@ -769,8 +775,6 @@
"cross-spawn/which": ["which@2.0.2", "", { "dependencies": { "isexe": "^2.0.0" }, "bin": { "node-which": "./bin/node-which" } }, "sha512-BLI3Tl1TW3Pvl70l3yq3Y64i+awpwXqsGBYWkkqMtnbXgrMD+yj7rhW0kuEDxzJaYXGjEW5ogapKNMEKNMjibA=="],
"discord-rpc/ws": ["ws@7.5.10", "", { "peerDependencies": { "bufferutil": "^4.0.1", "utf-8-validate": "^5.0.2" }, "optionalPeers": ["bufferutil", "utf-8-validate"] }, "sha512-+dbF1tHwZpXcbOJdVOkzLDxZP1ailvSxM6ZweXTegylPny803bFhA+vqBYw4s31NSAk4S2Qz+AKXK9a4wkdjcQ=="],
"electron/@types/node": ["@types/node@22.19.15", "", { "dependencies": { "undici-types": "~6.21.0" } }, "sha512-F0R/h2+dsy5wJAUe3tAU6oqa2qbWY5TpNfL/RGmo1y38hiyO1w3x2jPtt76wmuaJI4DQnOBu21cNXQ2STIUUWg=="],
"electron-winstaller/fs-extra": ["fs-extra@7.0.1", "", { "dependencies": { "graceful-fs": "^4.1.2", "jsonfile": "^4.0.0", "universalify": "^0.1.0" } }, "sha512-YJDaCJZEnBmcbw13fvdAM9AwNOJwOzrE4pqMqBq5nFiEqXUqHwlK4B+3pUw6JNvfSPtX05xFHtYy/1ni01eGCw=="],

View File

@@ -1,5 +0,0 @@
type: internal
area: release
- Added a maintained source coverage lane that shards Bun coverage one test file at a time and merges LCOV output into `coverage/test-src/lcov.info`.
- CI and release quality-gate now upload the merged source-lane LCOV artifact for inspection.

View File

@@ -1,5 +0,0 @@
type: fixed
area: stats
- Fixed stats startup so the immersion tracker can run when `Bun.serve` is unavailable.
- Stats server now falls back to a Node `http` listener in Electron/runtime paths that do not expose Bun.

View File

@@ -1,6 +0,0 @@
type: internal
area: runtime
- Extracted remaining inline runtime logic from `src/main.ts` into dedicated runtime modules and composer helpers.
- Added focused regression tests for the extracted runtime/composer boundaries.
- Updated task tracking notes to mark TASK-238.6 complete and confirm follow-on boot-phase split can be deferred.

View File

@@ -1,6 +0,0 @@
type: internal
area: runtime
- Split `src/main.ts` boot wiring into dedicated `src/main/boot/services.ts`, `src/main/boot/runtimes.ts`, and `src/main/boot/handlers.ts` modules.
- Added focused tests for the new boot-phase seams and kept the startup/typecheck/build verification lanes green.
- Updated internal architecture/task docs to record the boot-phase split and new ownership boundary.

View File

@@ -0,0 +1,6 @@
type: changed
area: core
- Refactored startup, query, and workflow code into focused modules.
- Added repo-local workflow plugin shims and updated internal docs and verification helpers.
- Expanded tests around launcher, runtime, stats, and immersion-tracker behavior.

View File

@@ -1,5 +1,10 @@
# Changelog
## v0.10.0 (2026-03-29)
- Fixed stats startup so the immersion tracker can run when `Bun.serve` is unavailable.
- Added a Node `http` fallback for Electron/runtime paths that do not expose Bun, so stats keeps working there too.
- Updated Discord Rich Presence to the maintained `@xhayper/discord-rpc` wrapper.
## v0.9.3 (2026-03-25)
- Moved YouTube primary subtitle language defaults to `youtube.primarySubLanguages`.
- Removed the placeholder YouTube subtitle retime step; downloaded primary subtitle tracks are now used directly.

View File

@@ -1,6 +1,6 @@
{
"name": "subminer",
"version": "0.9.3",
"version": "0.10.0",
"description": "All-in-one sentence mining overlay with AnkiConnect and dictionary integration",
"packageManager": "bun@1.3.5",
"main": "dist/main-entry.js",
@@ -100,9 +100,9 @@
"dependencies": {
"@fontsource-variable/geist": "^5.2.8",
"@fontsource-variable/geist-mono": "^5.2.7",
"@xhayper/discord-rpc": "^1.3.3",
"axios": "^1.13.5",
"commander": "^14.0.3",
"discord-rpc": "^4.0.1",
"hono": "^4.12.7",
"jsonc-parser": "^3.3.1",
"libsql": "^0.5.22",

View File

@@ -1,7 +1,8 @@
import assert from 'node:assert/strict';
import { resolve } from 'node:path';
import test from 'node:test';
import { mergeLcovReports } from './run-coverage-lane';
import { mergeLcovReports, resolveCoverageDir } from './run-coverage-lane';
test('mergeLcovReports combines duplicate source-file counters across shard outputs', () => {
const merged = mergeLcovReports([
@@ -59,3 +60,15 @@ test('mergeLcovReports keeps distinct source files as separate records', () => {
assert.match(merged, /SF:src\/a\.ts[\s\S]*end_of_record/);
assert.match(merged, /SF:src\/b\.ts[\s\S]*end_of_record/);
});
// Regression coverage for resolveCoverageDir path containment: the default
// ('coverage') and an explicit in-repo --coverage-dir resolve under the repo
// root, while `..`-relative escapes and absolute out-of-repo paths throw.
test('resolveCoverageDir keeps coverage output inside the repository', () => {
const repoRoot = resolve('/tmp', 'subminer-repo-root');
assert.equal(resolveCoverageDir(repoRoot, []), resolve(repoRoot, 'coverage'));
assert.equal(
resolveCoverageDir(repoRoot, ['--coverage-dir', 'coverage/test-src']),
resolve(repoRoot, 'coverage/test-src'),
);
assert.throws(() => resolveCoverageDir(repoRoot, ['--coverage-dir', '../escape']));
assert.throws(() => resolveCoverageDir(repoRoot, ['--coverage-dir', '/tmp/escape']));
});

View File

@@ -1,6 +1,6 @@
import { existsSync, mkdirSync, readFileSync, readdirSync, rmSync, writeFileSync } from 'node:fs';
import { spawnSync } from 'node:child_process';
import { join, relative, resolve } from 'node:path';
import { isAbsolute, join, relative, resolve } from 'node:path';
type LaneConfig = {
roots: string[];
@@ -85,6 +85,15 @@ function parseCoverageDirArg(argv: string[]): string {
return 'coverage';
}
/**
 * Resolve the `--coverage-dir` CLI value against the repository root and
 * guarantee the result stays inside the repository.
 *
 * @param repoRootDir - Absolute path of the repository root.
 * @param argv - CLI argument vector to scan for `--coverage-dir`.
 * @returns The absolute, repo-contained coverage output directory.
 * @throws Error when the requested directory escapes the repository
 *   (a `..`-relative path or an absolute path outside the root).
 */
export function resolveCoverageDir(repoRootDir: string, argv: string[]): string {
  const resolvedTarget = resolve(repoRootDir, parseCoverageDirArg(argv));
  // A repo-internal path relativizes to something that neither is absolute
  // nor climbs upward with `..`; anything else is an escape attempt.
  const relativeToRoot = relative(repoRootDir, resolvedTarget);
  const escapesRepo = isAbsolute(relativeToRoot) || relativeToRoot.startsWith('..');
  if (escapesRepo) {
    throw new Error(`--coverage-dir must be within repository: ${resolvedTarget}`);
  }
  return resolvedTarget;
}
function parseLcovReport(report: string): LcovRecord[] {
const records: LcovRecord[] = [];
let current: LcovRecord | null = null;
@@ -251,7 +260,7 @@ function runCoverageLane(): number {
return 1;
}
const coverageDir = resolve(repoRoot, parseCoverageDirArg(process.argv.slice(3)));
const coverageDir = resolveCoverageDir(repoRoot, process.argv.slice(3));
const shardRoot = join(coverageDir, '.shards');
mkdirSync(coverageDir, { recursive: true });
rmSync(shardRoot, { recursive: true, force: true });
@@ -260,39 +269,43 @@ function runCoverageLane(): number {
const files = getLaneFiles(laneName);
const reports: string[] = [];
for (const [index, file] of files.entries()) {
const shardDir = join(shardRoot, `${String(index + 1).padStart(3, '0')}`);
const result = spawnSync(
'bun',
['test', '--coverage', '--coverage-reporter=lcov', '--coverage-dir', shardDir, `./${file}`],
{
cwd: repoRoot,
stdio: 'inherit',
},
);
try {
for (const [index, file] of files.entries()) {
const shardDir = join(shardRoot, `${String(index + 1).padStart(3, '0')}`);
const result = spawnSync(
'bun',
['test', '--coverage', '--coverage-reporter=lcov', '--coverage-dir', shardDir, `./${file}`],
{
cwd: repoRoot,
stdio: 'inherit',
},
);
if (result.error) {
throw result.error;
}
if ((result.status ?? 1) !== 0) {
return result.status ?? 1;
if (result.error) {
throw result.error;
}
if ((result.status ?? 1) !== 0) {
return result.status ?? 1;
}
const lcovPath = join(shardDir, 'lcov.info');
if (!existsSync(lcovPath)) {
process.stdout.write(`Skipping empty coverage shard for ${file}\n`);
continue;
}
reports.push(readFileSync(lcovPath, 'utf8'));
}
const lcovPath = join(shardDir, 'lcov.info');
if (!existsSync(lcovPath)) {
process.stdout.write(`Skipping empty coverage shard for ${file}\n`);
continue;
}
reports.push(readFileSync(lcovPath, 'utf8'));
writeFileSync(join(coverageDir, 'lcov.info'), mergeLcovReports(reports), 'utf8');
process.stdout.write(`Merged LCOV written to ${relative(repoRoot, join(coverageDir, 'lcov.info'))}\n`);
return 0;
} finally {
rmSync(shardRoot, { recursive: true, force: true });
}
writeFileSync(join(coverageDir, 'lcov.info'), mergeLcovReports(reports), 'utf8');
rmSync(shardRoot, { recursive: true, force: true });
process.stdout.write(`Merged LCOV written to ${relative(repoRoot, join(coverageDir, 'lcov.info'))}\n`);
return 0;
}
if (require.main === module) {
// @ts-ignore Bun entrypoint detection; TS config for scripts still targets CommonJS.
if (import.meta.main) {
process.exit(runCoverageLane());
}

View File

@@ -82,6 +82,65 @@ test('pruneRawRetention uses session retention separately from telemetry retenti
}
});
// Regression test: when every retention window is "disabled" by passing
// Number.POSITIVE_INFINITY, pruneRawRetention must delete nothing and report
// zero deletions for events, telemetry, and ended sessions.
test('pruneRawRetention skips disabled retention windows', () => {
const dbPath = makeDbPath();
const db = new Database(dbPath);
try {
ensureSchema(db);
// Fixed "now" so the cutoff arithmetic in the prune call is deterministic.
const nowMs = 1_000_000_000;
// Seed one video, one ended session, one telemetry sample, and one event,
// all timestamped shortly before nowMs — close enough that any finite
// retention window of the same magnitude WOULD have pruned them.
db.exec(`
INSERT INTO imm_videos (
video_id, video_key, canonical_title, source_type, duration_ms, CREATED_DATE, LAST_UPDATE_DATE
) VALUES (
1, 'local:/tmp/video.mkv', 'Video', 1, 0, ${nowMs}, ${nowMs}
);
INSERT INTO imm_sessions (
session_id, session_uuid, video_id, started_at_ms, ended_at_ms, status, CREATED_DATE, LAST_UPDATE_DATE
) VALUES (
1, 'session-1', 1, ${nowMs - 1_000}, ${nowMs - 500}, 2, ${nowMs}, ${nowMs}
);
INSERT INTO imm_session_telemetry (
session_id, sample_ms, total_watched_ms, active_watched_ms, CREATED_DATE, LAST_UPDATE_DATE
) VALUES (
1, ${nowMs - 2_000}, 0, 0, ${nowMs}, ${nowMs}
);
INSERT INTO imm_session_events (
session_id, event_type, ts_ms, payload_json, CREATED_DATE, LAST_UPDATE_DATE
) VALUES (
1, 1, ${nowMs - 3_000}, '{}', ${nowMs}, ${nowMs}
);
`);
// Infinite retention means "keep forever" for each raw table.
const result = pruneRawRetention(db, nowMs, {
eventsRetentionMs: Number.POSITIVE_INFINITY,
telemetryRetentionMs: Number.POSITIVE_INFINITY,
sessionsRetentionMs: Number.POSITIVE_INFINITY,
});
// Re-count each table to confirm the rows physically survived.
const remainingSessionEvents = db
.prepare('SELECT COUNT(*) AS count FROM imm_session_events')
.get() as { count: number };
const remainingTelemetry = db
.prepare('SELECT COUNT(*) AS count FROM imm_session_telemetry')
.get() as { count: number };
const remainingSessions = db
.prepare('SELECT COUNT(*) AS count FROM imm_sessions')
.get() as { count: number };
// Both the reported deletion counts and the actual row counts must show
// that nothing was pruned.
assert.equal(result.deletedSessionEvents, 0);
assert.equal(result.deletedTelemetryRows, 0);
assert.equal(result.deletedEndedSessions, 0);
assert.equal(remainingSessionEvents.count, 1);
assert.equal(remainingTelemetry.count, 1);
assert.equal(remainingSessions.count, 1);
} finally {
// Always release the SQLite handle and remove the temp database file.
db.close();
cleanupDbPath(dbPath);
}
});
test('toMonthKey floors negative timestamps into the prior UTC month', () => {
assert.equal(toMonthKey(-1), 196912);
assert.equal(toMonthKey(-86_400_000), 196912);

View File

@@ -46,32 +46,34 @@ export function toMonthKey(timestampMs: number): number {
export function pruneRawRetention(
db: DatabaseSync,
nowMs: number,
currentMs: number,
policy: {
eventsRetentionMs: number;
telemetryRetentionMs: number;
sessionsRetentionMs: number;
},
): RawRetentionResult {
const eventCutoff = nowMs - policy.eventsRetentionMs;
const telemetryCutoff = nowMs - policy.telemetryRetentionMs;
const sessionsCutoff = nowMs - policy.sessionsRetentionMs;
const deletedSessionEvents = (
db.prepare(`DELETE FROM imm_session_events WHERE ts_ms < ?`).run(toDbMs(eventCutoff)) as {
changes: number;
}
).changes;
const deletedTelemetryRows = (
db
.prepare(`DELETE FROM imm_session_telemetry WHERE sample_ms < ?`)
.run(toDbMs(telemetryCutoff)) as { changes: number }
).changes;
const deletedEndedSessions = (
db
.prepare(`DELETE FROM imm_sessions WHERE ended_at_ms IS NOT NULL AND ended_at_ms < ?`)
.run(toDbMs(sessionsCutoff)) as { changes: number }
).changes;
const deletedSessionEvents = Number.isFinite(policy.eventsRetentionMs)
? (
db.prepare(`DELETE FROM imm_session_events WHERE ts_ms < ?`).run(
toDbMs(currentMs - policy.eventsRetentionMs),
) as { changes: number }
).changes
: 0;
const deletedTelemetryRows = Number.isFinite(policy.telemetryRetentionMs)
? (
db
.prepare(`DELETE FROM imm_session_telemetry WHERE sample_ms < ?`)
.run(toDbMs(currentMs - policy.telemetryRetentionMs)) as { changes: number }
).changes
: 0;
const deletedEndedSessions = Number.isFinite(policy.sessionsRetentionMs)
? (
db
.prepare(`DELETE FROM imm_sessions WHERE ended_at_ms IS NOT NULL AND ended_at_ms < ?`)
.run(toDbMs(currentMs - policy.sessionsRetentionMs)) as { changes: number }
).changes
: 0;
return {
deletedSessionEvents,
@@ -82,7 +84,7 @@ export function pruneRawRetention(
export function pruneRollupRetention(
db: DatabaseSync,
nowMs: number,
currentMs: number,
policy: {
dailyRollupRetentionMs: number;
monthlyRollupRetentionMs: number;
@@ -92,7 +94,7 @@ export function pruneRollupRetention(
? (
db
.prepare(`DELETE FROM imm_daily_rollups WHERE rollup_day < ?`)
.run(Math.floor((nowMs - policy.dailyRollupRetentionMs) / DAILY_MS)) as {
.run(Math.floor((currentMs - policy.dailyRollupRetentionMs) / DAILY_MS)) as {
changes: number;
}
).changes
@@ -101,7 +103,7 @@ export function pruneRollupRetention(
? (
db
.prepare(`DELETE FROM imm_monthly_rollups WHERE rollup_month < ?`)
.run(toMonthKey(nowMs - policy.monthlyRollupRetentionMs)) as {
.run(toMonthKey(currentMs - policy.monthlyRollupRetentionMs)) as {
changes: number;
}
).changes
@@ -155,29 +157,32 @@ function upsertDailyRollupsForGroups(
CAST(julianday(s.started_at_ms / 1000, 'unixepoch', 'localtime') - 2440587.5 AS INTEGER) AS rollup_day,
s.video_id AS video_id,
COUNT(DISTINCT s.session_id) AS total_sessions,
COALESCE(SUM(sm.max_active_ms), 0) / 60000.0 AS total_active_min,
COALESCE(SUM(sm.max_lines), 0) AS total_lines_seen,
COALESCE(SUM(sm.max_tokens), 0) AS total_tokens_seen,
COALESCE(SUM(sm.max_cards), 0) AS total_cards,
COALESCE(SUM(COALESCE(sm.max_active_ms, s.active_watched_ms)), 0) / 60000.0 AS total_active_min,
COALESCE(SUM(COALESCE(sm.max_lines, s.lines_seen)), 0) AS total_lines_seen,
COALESCE(SUM(COALESCE(sm.max_tokens, s.tokens_seen)), 0) AS total_tokens_seen,
COALESCE(SUM(COALESCE(sm.max_cards, s.cards_mined)), 0) AS total_cards,
CASE
WHEN COALESCE(SUM(sm.max_active_ms), 0) > 0
THEN (COALESCE(SUM(sm.max_cards), 0) * 60.0) / (COALESCE(SUM(sm.max_active_ms), 0) / 60000.0)
WHEN COALESCE(SUM(COALESCE(sm.max_active_ms, s.active_watched_ms)), 0) > 0
THEN (COALESCE(SUM(COALESCE(sm.max_cards, s.cards_mined)), 0) * 60.0)
/ (COALESCE(SUM(COALESCE(sm.max_active_ms, s.active_watched_ms)), 0) / 60000.0)
ELSE NULL
END AS cards_per_hour,
CASE
WHEN COALESCE(SUM(sm.max_active_ms), 0) > 0
THEN COALESCE(SUM(sm.max_tokens), 0) / (COALESCE(SUM(sm.max_active_ms), 0) / 60000.0)
WHEN COALESCE(SUM(COALESCE(sm.max_active_ms, s.active_watched_ms)), 0) > 0
THEN COALESCE(SUM(COALESCE(sm.max_tokens, s.tokens_seen)), 0)
/ (COALESCE(SUM(COALESCE(sm.max_active_ms, s.active_watched_ms)), 0) / 60000.0)
ELSE NULL
END AS tokens_per_min,
CASE
WHEN COALESCE(SUM(sm.max_lookups), 0) > 0
THEN CAST(COALESCE(SUM(sm.max_hits), 0) AS REAL) / CAST(SUM(sm.max_lookups) AS REAL)
WHEN COALESCE(SUM(COALESCE(sm.max_lookups, s.lookup_count)), 0) > 0
THEN CAST(COALESCE(SUM(COALESCE(sm.max_hits, s.lookup_hits)), 0) AS REAL)
/ CAST(COALESCE(SUM(COALESCE(sm.max_lookups, s.lookup_count)), 0) AS REAL)
ELSE NULL
END AS lookup_hit_rate,
? AS CREATED_DATE,
? AS LAST_UPDATE_DATE
FROM imm_sessions s
JOIN (
LEFT JOIN (
SELECT
t.session_id,
MAX(t.active_watched_ms) AS max_active_ms,
@@ -227,14 +232,14 @@ function upsertMonthlyRollupsForGroups(
CAST(strftime('%Y%m', s.started_at_ms / 1000, 'unixepoch', 'localtime') AS INTEGER) AS rollup_month,
s.video_id AS video_id,
COUNT(DISTINCT s.session_id) AS total_sessions,
COALESCE(SUM(sm.max_active_ms), 0) / 60000.0 AS total_active_min,
COALESCE(SUM(sm.max_lines), 0) AS total_lines_seen,
COALESCE(SUM(sm.max_tokens), 0) AS total_tokens_seen,
COALESCE(SUM(sm.max_cards), 0) AS total_cards,
COALESCE(SUM(COALESCE(sm.max_active_ms, s.active_watched_ms)), 0) / 60000.0 AS total_active_min,
COALESCE(SUM(COALESCE(sm.max_lines, s.lines_seen)), 0) AS total_lines_seen,
COALESCE(SUM(COALESCE(sm.max_tokens, s.tokens_seen)), 0) AS total_tokens_seen,
COALESCE(SUM(COALESCE(sm.max_cards, s.cards_mined)), 0) AS total_cards,
? AS CREATED_DATE,
? AS LAST_UPDATE_DATE
FROM imm_sessions s
JOIN (
LEFT JOIN (
SELECT
t.session_id,
MAX(t.active_watched_ms) AS max_active_ms,
@@ -276,7 +281,7 @@ function getAffectedRollupGroups(
FROM imm_session_telemetry t
JOIN imm_sessions s
ON s.session_id = t.session_id
WHERE t.sample_ms > ?
WHERE t.sample_ms >= ?
`,
)
.all(lastRollupSampleMs) as unknown as RollupGroupRow[]

View File

@@ -186,7 +186,7 @@ export function getSimilarWords(db: DatabaseSync, wordId: number, limit = 10): S
headword: string;
reading: string;
} | null;
if (!word) return [];
if (!word || word.headword.trim() === '') return [];
return db
.prepare(
`

View File

@@ -205,7 +205,7 @@ export function getQueryHints(db: DatabaseSync): {
const now = new Date();
const todayLocal = Math.floor(
(now.getTime() / 1000 - now.getTimezoneOffset() * 60) / 86_400,
new Date(now.getFullYear(), now.getMonth(), now.getDate()).getTime() / 86_400_000,
);
const episodesToday =

View File

@@ -272,5 +272,11 @@ export function deleteSessionsByIds(db: DatabaseSync, sessionIds: number[]): voi
}
/**
 * Normalizes a millisecond timestamp to the bigint representation stored in
 * the database.
 *
 * @param ms - Timestamp in milliseconds, as a finite number or a bigint.
 * @returns The timestamp truncated toward zero as a bigint (bigints pass
 *   through unchanged).
 * @throws TypeError when given a non-finite number (NaN/±Infinity), which
 *   would otherwise be silently coerced and corrupt stored timestamps.
 */
export function toDbMs(ms: number | bigint): bigint {
  if (typeof ms === 'bigint') {
    return ms;
  }
  if (!Number.isFinite(ms)) {
    throw new TypeError(`Invalid database timestamp: ${ms}`);
  }
  // Truncate fractional milliseconds; BigInt() requires an integral number.
  return BigInt(Math.trunc(ms));
}

View File

@@ -168,7 +168,7 @@ function buildAggregatedTrendRows(rollups: ImmersionSessionRollupRow[]) {
words: 0,
sessions: 0,
};
existing.activeMin += Math.round(rollup.totalActiveMin);
existing.activeMin += rollup.totalActiveMin;
existing.cards += rollup.totalCards;
existing.words += rollup.totalTokensSeen;
existing.sessions += rollup.totalSessions;
@@ -179,7 +179,7 @@ function buildAggregatedTrendRows(rollups: ImmersionSessionRollupRow[]) {
.sort(([left], [right]) => left - right)
.map(([key, value]) => ({
label: makeTrendLabel(key),
activeMin: value.activeMin,
activeMin: Math.round(value.activeMin),
cards: value.cards,
words: value.words,
sessions: value.sessions,
@@ -243,22 +243,32 @@ function buildSessionSeriesByMonth(
.map(([monthKey, value]) => ({ label: makeTrendLabel(monthKey), value }));
}
function buildLookupsPerHundredWords(sessions: TrendSessionMetricRow[]): TrendChartPoint[] {
const lookupsByDay = new Map<number, number>();
const wordsByDay = new Map<number, number>();
function buildLookupsPerHundredWords(
sessions: TrendSessionMetricRow[],
groupBy: TrendGroupBy,
): TrendChartPoint[] {
const lookupsByBucket = new Map<number, number>();
const wordsByBucket = new Map<number, number>();
for (const session of sessions) {
const epochDay = getLocalEpochDay(session.startedAtMs);
lookupsByDay.set(epochDay, (lookupsByDay.get(epochDay) ?? 0) + session.yomitanLookupCount);
wordsByDay.set(epochDay, (wordsByDay.get(epochDay) ?? 0) + getTrendSessionWordCount(session));
const bucketKey =
groupBy === 'month' ? getLocalMonthKey(session.startedAtMs) : getLocalEpochDay(session.startedAtMs);
lookupsByBucket.set(
bucketKey,
(lookupsByBucket.get(bucketKey) ?? 0) + session.yomitanLookupCount,
);
wordsByBucket.set(
bucketKey,
(wordsByBucket.get(bucketKey) ?? 0) + getTrendSessionWordCount(session),
);
}
return Array.from(lookupsByDay.entries())
return Array.from(lookupsByBucket.entries())
.sort(([left], [right]) => left - right)
.map(([epochDay, lookups]) => {
const words = wordsByDay.get(epochDay) ?? 0;
.map(([bucketKey, lookups]) => {
const words = wordsByBucket.get(bucketKey) ?? 0;
return {
label: dayLabel(epochDay),
label: groupBy === 'month' ? makeTrendLabel(bucketKey) : dayLabel(bucketKey),
value: words > 0 ? +((lookups / words) * 100).toFixed(1) : 0,
};
});
@@ -595,7 +605,7 @@ export function getTrendsDashboard(
const animePerDay = {
episodes: buildEpisodesPerAnimeFromDailyRollups(dailyRollups, titlesByVideoId),
watchTime: buildPerAnimeFromDailyRollups(dailyRollups, titlesByVideoId, (rollup) =>
Math.round(rollup.totalActiveMin),
rollup.totalActiveMin,
),
cards: buildPerAnimeFromDailyRollups(
dailyRollups,
@@ -633,7 +643,7 @@ export function getTrendsDashboard(
),
},
ratios: {
lookupsPerHundred: buildLookupsPerHundredWords(sessions),
lookupsPerHundred: buildLookupsPerHundredWords(sessions, groupBy),
},
animePerDay,
animeCumulative: {

View File

@@ -1,7 +1,9 @@
import { Hono } from 'hono';
import type { ImmersionTrackerService } from './immersion-tracker-service.js';
import http, { type IncomingMessage, type ServerResponse } from 'node:http';
import { basename, extname, resolve, sep } from 'node:path';
import { readFileSync, existsSync, statSync } from 'node:fs';
import { Readable } from 'node:stream';
import { MediaGenerator } from '../../media-generator.js';
import { AnkiConnectClient } from '../../anki-connect.js';
import type { AnkiConnectConfig } from '../../types.js';
@@ -59,6 +61,71 @@ function resolveStatsNoteFieldName(
return null;
}
/**
 * Copies Node request headers into a WHATWG `Headers` object.
 *
 * Multi-valued headers (string[]) are appended entry by entry; `undefined`
 * entries are skipped. Keys from `Object.entries` are unique, so append and
 * set are equivalent for single values.
 */
function toFetchHeaders(headers: IncomingMessage['headers']): Headers {
  const result = new Headers();
  for (const [name, value] of Object.entries(headers)) {
    if (value === undefined) {
      continue;
    }
    const entries = Array.isArray(value) ? value : [value];
    for (const entry of entries) {
      result.append(name, entry);
    }
  }
  return result;
}
/**
 * Converts a Node `IncomingMessage` into a fetch `Request`.
 *
 * The URL is rebuilt from the request path and the Host header (falling back
 * to 127.0.0.1). For methods that may carry a body, the Node stream is
 * exposed as a web ReadableStream with `duplex: 'half'` as required for
 * streaming request bodies.
 */
function toFetchRequest(req: IncomingMessage): Request {
  const method = req.method ?? 'GET';
  const origin = `http://${req.headers.host ?? '127.0.0.1'}`;
  const url = new URL(req.url ?? '/', origin);
  const hasBody = method !== 'GET' && method !== 'HEAD';
  const init: RequestInit & { duplex?: 'half' } = hasBody
    ? {
        method,
        headers: toFetchHeaders(req.headers),
        body: Readable.toWeb(req) as BodyInit,
        duplex: 'half',
      }
    : {
        method,
        headers: toFetchHeaders(req.headers),
      };
  return new Request(url, init);
}
/**
 * Writes a fetch `Response` out through a Node `ServerResponse`.
 *
 * The body is fully buffered before sending (responses here are small API
 * payloads/static assets, not streams). `Set-Cookie` is copied via
 * `getSetCookie()` when available, because `Headers.forEach` exposes multiple
 * Set-Cookie values as a single comma-joined string, which corrupts cookies
 * whose attributes contain commas (e.g. Expires).
 *
 * @param res - Node response to write to; status, headers, then body.
 * @param response - Fetch response produced by the Hono app.
 */
async function writeFetchResponse(res: ServerResponse, response: Response): Promise<void> {
  res.statusCode = response.status;
  // getSetCookie() is available on Node 18.14+/undici; fall back gracefully.
  const headers = response.headers as Headers & { getSetCookie?: () => string[] };
  const setCookies = headers.getSetCookie?.() ?? [];
  headers.forEach((value, key) => {
    if (key.toLowerCase() === 'set-cookie' && setCookies.length > 0) {
      return; // written separately below to preserve individual cookies
    }
    res.setHeader(key, value);
  });
  if (setCookies.length > 0) {
    res.setHeader('set-cookie', setCookies);
  }
  const body = await response.arrayBuffer();
  res.end(Buffer.from(body));
}
/**
 * Serves the Hono app over a plain Node HTTP server, used as the fallback
 * when `Bun.serve` is unavailable. Binds to 127.0.0.1 on the configured port.
 *
 * Any error while bridging request/response falls back to a bare 500.
 *
 * @returns A handle whose `close()` stops accepting new connections.
 */
function startNodeHttpServer(
  app: Hono,
  config: StatsServerConfig,
): { close: () => void } {
  const handleRequest = async (req: IncomingMessage, res: ServerResponse): Promise<void> => {
    try {
      const response = await app.fetch(toFetchRequest(req));
      await writeFetchResponse(res, response);
    } catch {
      res.statusCode = 500;
      res.end('Internal Server Error');
    }
  };
  const server = http.createServer((req, res) => {
    // Fire-and-forget: errors are handled inside handleRequest.
    void handleRequest(req, res);
  });
  server.listen(config.port, '127.0.0.1');
  return {
    close: () => {
      server.close();
    },
  };
}
/** Load known words cache from disk into a Set. Returns null if unavailable. */
function loadKnownWordsSet(cachePath: string | undefined): Set<string> | null {
if (!cachePath || !existsSync(cachePath)) return null;
@@ -1006,27 +1073,29 @@ export function startStatsServer(config: StatsServerConfig): { close: () => void
resolveAnkiNoteId: config.resolveAnkiNoteId,
});
const bunServe = (
globalThis as typeof globalThis & {
Bun: {
serve: (options: {
fetch: (typeof app)['fetch'];
port: number;
hostname: string;
}) => { stop: () => void };
};
}
).Bun.serve;
const server = bunServe({
fetch: app.fetch,
port: config.port,
hostname: '127.0.0.1',
});
return {
close: () => {
server.stop();
},
const bunRuntime = globalThis as typeof globalThis & {
Bun?: {
serve?: (options: {
fetch: (typeof app)['fetch'];
port: number;
hostname: string;
}) => { stop: () => void };
};
};
if (bunRuntime.Bun?.serve) {
const server = bunRuntime.Bun.serve({
fetch: app.fetch,
port: config.port,
hostname: '127.0.0.1',
});
return {
close: () => {
server.stop();
},
};
}
return startNodeHttpServer(app, config);
}

View File

@@ -31,6 +31,7 @@ import {
screen,
} from 'electron';
import { applyControllerConfigUpdate } from './main/controller-config-update.js';
import { createDiscordRpcClient } from './main/runtime/discord-rpc-client.js';
import { mergeAiConfig } from './ai/config';
function getPasswordStoreArg(argv: string[]): string | null {
@@ -68,6 +69,26 @@ function getDefaultPasswordStore(): string {
return 'gnome-libsecret';
}
/**
 * Derives the two startup-mode flags from the parsed CLI arguments.
 *
 * - `shouldUseMinimalStartup`: texthooker mode, or a stats utility invocation
 *   (cleanup / background / stop) that needs no UI.
 * - `shouldSkipHeavyStartup`: settings-only startup, or any of the stats /
 *   dictionary / setup entry points that bypass the full app boot.
 */
function getStartupModeFlags(initialArgs: CliArgs | null | undefined): {
  shouldUseMinimalStartup: boolean;
  shouldSkipHeavyStartup: boolean;
} {
  const statsUtilityMode = Boolean(
    initialArgs?.stats &&
      (initialArgs.statsCleanup || initialArgs.statsBackground || initialArgs.statsStop),
  );
  const shouldUseMinimalStartup = Boolean(initialArgs?.texthooker) || statsUtilityMode;
  const shouldSkipHeavyStartup = Boolean(
    initialArgs &&
      (shouldRunSettingsOnlyStartup(initialArgs) ||
        initialArgs.stats ||
        initialArgs.dictionary ||
        initialArgs.setup),
  );
  return { shouldUseMinimalStartup, shouldSkipHeavyStartup };
}
protocol.registerSchemesAsPrivileged([
{
scheme: 'chrome-extension',
@@ -399,7 +420,7 @@ import {
import { handleMpvCommandFromIpcRuntime } from './main/ipc-mpv-command';
import { registerIpcRuntimeServices } from './main/ipc-runtime';
import { createAnkiJimakuIpcRuntimeServiceDeps } from './main/dependencies';
import { createMainBootServices } from './main/boot/services';
import { createMainBootServices, type MainBootServicesResult } from './main/boot/services';
import { handleCliCommandRuntimeServiceWithContext } from './main/cli-runtime';
import { createOverlayModalRuntimeService } from './main/overlay-runtime';
import { createOverlayModalInputState } from './main/runtime/overlay-modal-input-state';
@@ -596,6 +617,28 @@ const getDefaultSocketPathHandler = createGetDefaultSocketPathHandler(getDefault
function getDefaultSocketPath(): string {
return getDefaultSocketPathHandler();
}
type BootServices = MainBootServicesResult<
ConfigService,
ReturnType<typeof createAnilistTokenStore>,
ReturnType<typeof createJellyfinTokenStore>,
ReturnType<typeof createAnilistUpdateQueue>,
SubtitleWebSocket,
ReturnType<typeof createLogger>,
ReturnType<typeof createMainRuntimeRegistry>,
ReturnType<typeof createOverlayManager>,
ReturnType<typeof createOverlayModalInputState>,
ReturnType<typeof createOverlayContentMeasurementStore>,
ReturnType<typeof createOverlayModalRuntimeService>,
ReturnType<typeof createAppState>,
{
requestSingleInstanceLock: () => boolean;
quit: () => void;
on: (event: string, listener: (...args: unknown[]) => void) => unknown;
whenReady: () => Promise<void>;
}
>;
const bootServices = createMainBootServices({
platform: process.platform,
argv: process.argv,
@@ -675,31 +718,7 @@ const bootServices = createMainBootServices({
});
},
createAppState,
}) as {
configDir: string;
userDataPath: string;
defaultMpvLogPath: string;
defaultImmersionDbPath: string;
configService: ConfigService;
anilistTokenStore: ReturnType<typeof createAnilistTokenStore>;
jellyfinTokenStore: ReturnType<typeof createJellyfinTokenStore>;
anilistUpdateQueue: ReturnType<typeof createAnilistUpdateQueue>;
subtitleWsService: SubtitleWebSocket;
annotationSubtitleWsService: SubtitleWebSocket;
logger: ReturnType<typeof createLogger>;
runtimeRegistry: ReturnType<typeof createMainRuntimeRegistry>;
overlayManager: ReturnType<typeof createOverlayManager>;
overlayModalInputState: ReturnType<typeof createOverlayModalInputState>;
overlayContentMeasurementStore: ReturnType<typeof createOverlayContentMeasurementStore>;
overlayModalRuntime: ReturnType<typeof createOverlayModalRuntimeService>;
appState: ReturnType<typeof createAppState>;
appLifecycleApp: {
requestSingleInstanceLock: () => boolean;
quit: () => void;
on: (event: string, listener: (...args: unknown[]) => void) => unknown;
whenReady: () => Promise<void>;
};
};
}) as BootServices;
const {
configDir: CONFIG_DIR,
userDataPath: USER_DATA_PATH,
@@ -1097,26 +1116,6 @@ const discordPresenceRuntime = createDiscordPresenceRuntime({
},
});
// Legacy factory for the Discord Rich Presence client built on the
// deprecated 'discord-rpc' package (this diff replaces it with the
// @xhayper/discord-rpc wrapper). Loaded lazily via require so the dependency
// is only touched when presence is actually initialized.
function createDiscordRpcClient() {
// Minimal structural typing of the CommonJS module surface used below.
const discordRpc = require('discord-rpc') as {
Client: new (opts: { transport: 'ipc' }) => {
login: (opts: { clientId: string }) => Promise<void>;
setActivity: (activity: Record<string, unknown>) => Promise<void>;
clearActivity: () => Promise<void>;
destroy: () => void;
};
};
const client = new discordRpc.Client({ transport: 'ipc' });
// NOTE(review): DISCORD_PRESENCE_APP_ID is presumably a module-level
// constant holding the Discord application id — confirm it is defined
// before this factory can run.
return {
login: () => client.login({ clientId: DISCORD_PRESENCE_APP_ID }),
// Callers pass an opaque activity payload; the double cast adapts it to
// the loose Record type the legacy package expects.
setActivity: (activity: unknown) =>
client.setActivity(activity as unknown as Record<string, unknown>),
clearActivity: () => client.clearActivity(),
destroy: () => client.destroy(),
};
}
async function initializeDiscordPresenceService(): Promise<void> {
if (getResolvedConfig().discordPresence.enabled !== true) {
appState.discordPresenceService = null;
@@ -1125,7 +1124,7 @@ async function initializeDiscordPresenceService(): Promise<void> {
appState.discordPresenceService = createDiscordPresenceService({
config: getResolvedConfig().discordPresence,
createClient: () => createDiscordRpcClient(),
createClient: () => createDiscordRpcClient(DISCORD_PRESENCE_APP_ID),
logDebug: (message, meta) => logger.debug(message, meta),
});
await appState.discordPresenceService.start();
@@ -3186,21 +3185,9 @@ const { appReadyRuntimeRunner } = composeAppReadyRuntime({
shouldRunHeadlessInitialCommand: () =>
Boolean(appState.initialArgs && isHeadlessInitialCommand(appState.initialArgs)),
shouldUseMinimalStartup: () =>
Boolean(
appState.initialArgs?.texthooker ||
(appState.initialArgs?.stats &&
(appState.initialArgs?.statsCleanup ||
appState.initialArgs?.statsBackground ||
appState.initialArgs?.statsStop)),
),
getStartupModeFlags(appState.initialArgs).shouldUseMinimalStartup,
shouldSkipHeavyStartup: () =>
Boolean(
appState.initialArgs &&
(shouldRunSettingsOnlyStartup(appState.initialArgs) ||
appState.initialArgs.stats ||
appState.initialArgs.dictionary ||
appState.initialArgs.setup),
),
getStartupModeFlags(appState.initialArgs).shouldSkipHeavyStartup,
createImmersionTracker: () => {
ensureImmersionTrackerStarted();
},
@@ -4221,16 +4208,16 @@ const { registerIpcRuntimeHandlers } = composeIpcRuntimeHandlers({
};
}
if (appState.activeParsedSubtitleSource === resolvedSource.sourceKey) {
return {
cues: appState.activeParsedSubtitleCues,
currentTimeSec,
currentSubtitle,
config,
};
}
try {
if (appState.activeParsedSubtitleSource === resolvedSource.sourceKey) {
return {
cues: appState.activeParsedSubtitleCues,
currentTimeSec,
currentSubtitle,
config,
};
}
const content = await loadSubtitleSourceText(resolvedSource.path);
const cues = parseSubtitleCues(content, resolvedSource.path);
appState.activeParsedSubtitleCues = cues;
@@ -4480,11 +4467,20 @@ const { runAndApplyStartupState } = composeHeadlessStartupHandlers<
});
runAndApplyStartupState();
if (isAnilistTrackingEnabled(getResolvedConfig())) {
void refreshAnilistClientSecretStateIfEnabled({ force: true });
anilistStateRuntime.refreshRetryQueueState();
const startupModeFlags = getStartupModeFlags(appState.initialArgs);
const shouldUseMinimalStartup = startupModeFlags.shouldUseMinimalStartup;
const shouldSkipHeavyStartup = startupModeFlags.shouldSkipHeavyStartup;
if (!appState.initialArgs || (!shouldUseMinimalStartup && !shouldSkipHeavyStartup)) {
if (isAnilistTrackingEnabled(getResolvedConfig())) {
void refreshAnilistClientSecretStateIfEnabled({ force: true }).catch((error) => {
logger.error('Failed to refresh AniList client secret state during startup', error);
});
anilistStateRuntime.refreshRetryQueueState();
}
void initializeDiscordPresenceService().catch((error) => {
logger.error('Failed to initialize Discord presence service during startup', error);
});
}
void initializeDiscordPresenceService();
const { createMainWindow: createMainWindowHandler, createModalWindow: createModalWindowHandler } =
createOverlayWindowRuntimeHandlers<BrowserWindow>({
createOverlayWindowDeps: {

View File

@@ -0,0 +1,38 @@
import assert from 'node:assert/strict';
import test from 'node:test';
import { createDiscordRpcClient } from './discord-rpc-client';
// Verifies the wrapper delegates each lifecycle call to the injected raw
// client — login/destroy directly, activity methods through `client.user` —
// and that the calls arrive in order.
test('createDiscordRpcClient forwards rich presence calls through client.user', async () => {
  const invocationOrder: Array<string> = [];
  const record = (name: string) => async () => {
    invocationOrder.push(name);
  };
  const rpcClient = createDiscordRpcClient('123456789012345678', {
    createClient: () =>
      ({
        login: record('login'),
        user: {
          setActivity: record('setActivity'),
          clearActivity: record('clearActivity'),
        },
        destroy: record('destroy'),
      }) as never,
  });
  await rpcClient.login();
  await rpcClient.setActivity({
    details: 'Title',
    state: 'Playing 00:01 / 00:02',
    startTimestamp: 1_700_000_000,
  });
  await rpcClient.clearActivity();
  await rpcClient.destroy();
  assert.deepEqual(invocationOrder, ['login', 'setActivity', 'clearActivity', 'destroy']);
});

View File

@@ -0,0 +1,49 @@
import { Client } from '@xhayper/discord-rpc';
import type { DiscordActivityPayload } from '../../core/services/discord-presence';
/** Narrow view of the per-user presence API exposed after login. */
type DiscordRpcClientUserLike = {
  setActivity: (activity: DiscordActivityPayload) => Promise<unknown>;
  clearActivity: () => Promise<void>;
};

/** Structural surface of the @xhayper/discord-rpc client this module uses. */
type DiscordRpcRawClient = {
  login: () => Promise<void>;
  destroy: () => Promise<void>;
  user?: DiscordRpcClientUserLike;
};

/** Interface consumed by the Discord presence service. */
export type DiscordRpcClient = {
  login: () => Promise<void>;
  setActivity: (activity: DiscordActivityPayload) => Promise<void>;
  clearActivity: () => Promise<void>;
  destroy: () => Promise<void>;
};

/**
 * Returns the logged-in user facade.
 * @throws Error when login has not completed (no `client.user` yet).
 */
function requireUser(client: DiscordRpcRawClient): DiscordRpcClientUserLike {
  if (!client.user) {
    throw new Error('Discord RPC client user is unavailable');
  }
  return client.user;
}

/**
 * Adapts a raw RPC client to the narrow `DiscordRpcClient` interface.
 *
 * The activity methods are declared `async` so a missing user surfaces as a
 * rejected promise — previously `requireUser` threw synchronously, which
 * escaped callers that only attach `.catch()` to the returned promise.
 */
export function wrapDiscordRpcClient(client: DiscordRpcRawClient): DiscordRpcClient {
  return {
    login: () => client.login(),
    setActivity: async (activity) => {
      await requireUser(client).setActivity(activity);
    },
    clearActivity: async () => {
      await requireUser(client).clearActivity();
    },
    destroy: () => client.destroy(),
  };
}

/**
 * Creates a presence client for the given Discord application id over the
 * IPC transport.
 *
 * @param clientId - Discord application id used at login.
 * @param deps - Optional factory override for tests; when omitted, a real
 *   `@xhayper/discord-rpc` `Client` is constructed.
 */
export function createDiscordRpcClient(
  clientId: string,
  deps?: {
    createClient?: (options: { clientId: string; transport: { type: 'ipc' } }) => DiscordRpcRawClient;
  },
): DiscordRpcClient {
  const client =
    deps?.createClient?.({ clientId, transport: { type: 'ipc' } }) ??
    new Client({ clientId, transport: { type: 'ipc' } });
  return wrapDiscordRpcClient(client);
}

View File

@@ -18,9 +18,9 @@ function createSetupWindowHandler<TWindow>(
title: config.title,
show: true,
autoHideMenuBar: true,
resizable: config.resizable,
minimizable: config.minimizable,
maximizable: config.maximizable,
...(config.resizable === undefined ? {} : { resizable: config.resizable }),
...(config.minimizable === undefined ? {} : { minimizable: config.minimizable }),
...(config.maximizable === undefined ? {} : { maximizable: config.maximizable }),
webPreferences: {
nodeIntegration: false,
contextIsolation: true,