Update AniList docs

This commit is contained in:
2026-02-16 23:03:51 -08:00
parent 1d7406f3d4
commit 5602d751eb
9 changed files with 681 additions and 3 deletions

View File

@@ -0,0 +1,111 @@
import * as fs from "fs";
import * as path from "path";
import { safeStorage } from "electron";
// On-disk JSON shape of the persisted AniList token file.
interface PersistedTokenPayload {
  // Base64-encoded output of Electron safeStorage.encryptString.
  encryptedToken?: string;
  // Legacy unencrypted token; migrated to encrypted storage on load.
  plaintextToken?: string;
  // Epoch milliseconds of the last write.
  updatedAt?: number;
}
// Persistence API for the AniList OAuth token. Methods are declared as
// arrow-function properties, so implementations should be safe to destructure.
export interface AnilistTokenStore {
  // Returns the stored token, or null when absent, unreadable, or undecryptable.
  loadToken: () => string | null;
  // Persists the token (encrypted when possible); a blank token clears the store.
  saveToken: (token: string) => void;
  // Deletes the backing file if present.
  clearToken: () => void;
}
/** Create the parent directory of `filePath` (recursively) if it is missing. */
function ensureDirectory(filePath: string): void {
  const parent = path.dirname(filePath);
  if (fs.existsSync(parent)) {
    return;
  }
  fs.mkdirSync(parent, { recursive: true });
}
/** Serialize `payload` as pretty-printed JSON to `filePath`, creating parent dirs. */
function writePayload(filePath: string, payload: PersistedTokenPayload): void {
  ensureDirectory(filePath);
  const serialized = JSON.stringify(payload, null, 2);
  fs.writeFileSync(filePath, serialized, "utf-8");
}
/**
 * Create a file-backed store for the AniList OAuth token.
 *
 * Tokens are encrypted with Electron's `safeStorage` when available; a
 * plaintext fallback (with a warning) is used otherwise. Legacy plaintext
 * payloads are migrated to encrypted storage the first time they are loaded.
 * All I/O failures are logged and swallowed; methods never throw to callers.
 *
 * @param filePath Path of the JSON file holding the token payload.
 * @param logger   Sink for info/warn/error diagnostics.
 */
export function createAnilistTokenStore(
  filePath: string,
  logger: {
    info: (message: string) => void;
    warn: (message: string, details?: unknown) => void;
    error: (message: string, details?: unknown) => void;
  },
): AnilistTokenStore {
  // Fix: the original object-literal methods called `this.saveToken` /
  // `this.clearToken`, which breaks (TypeError swallowed by the try/catch)
  // when a method is destructured or passed as a callback — a usage the
  // arrow-function-property shape of AnilistTokenStore explicitly invites.
  // Named closures have no `this` dependence.
  const clearToken = (): void => {
    if (!fs.existsSync(filePath)) return;
    try {
      fs.unlinkSync(filePath);
      logger.info("Cleared stored AniList token.");
    } catch (error) {
      logger.error("Failed to clear stored AniList token.", error);
    }
  };

  const saveToken = (token: string): void => {
    const trimmed = token.trim();
    if (trimmed.length === 0) {
      // Persisting an empty/blank token is equivalent to clearing the store.
      clearToken();
      return;
    }
    try {
      if (!safeStorage.isEncryptionAvailable()) {
        logger.warn(
          "AniList token encryption unavailable; storing token in plaintext fallback.",
        );
        writePayload(filePath, {
          plaintextToken: trimmed,
          updatedAt: Date.now(),
        });
        return;
      }
      const encrypted = safeStorage.encryptString(trimmed);
      writePayload(filePath, {
        encryptedToken: encrypted.toString("base64"),
        updatedAt: Date.now(),
      });
    } catch (error) {
      logger.error("Failed to persist AniList token.", error);
    }
  };

  const loadToken = (): string | null => {
    if (!fs.existsSync(filePath)) {
      return null;
    }
    try {
      const raw = fs.readFileSync(filePath, "utf-8");
      const parsed = JSON.parse(raw) as PersistedTokenPayload;
      if (
        typeof parsed.encryptedToken === "string" &&
        parsed.encryptedToken.length > 0
      ) {
        const encrypted = Buffer.from(parsed.encryptedToken, "base64");
        if (!safeStorage.isEncryptionAvailable()) {
          // Cannot decrypt without OS-level encryption support.
          logger.warn(
            "AniList token encryption is not available on this system.",
          );
          return null;
        }
        const decrypted = safeStorage.decryptString(encrypted).trim();
        return decrypted.length > 0 ? decrypted : null;
      }
      if (
        typeof parsed.plaintextToken === "string" &&
        parsed.plaintextToken.trim().length > 0
      ) {
        // Legacy fallback: migrate plaintext token to encrypted storage on load.
        const plaintext = parsed.plaintextToken.trim();
        saveToken(plaintext);
        return plaintext;
      }
    } catch (error) {
      logger.error("Failed to read AniList token store.", error);
    }
    return null;
  };

  return { loadToken, saveToken, clearToken };
}

View File

@@ -0,0 +1,195 @@
import * as fs from "fs";
import * as path from "path";
// Retry pacing: first retry after 30s, doubling per attempt, capped at 6h.
const INITIAL_BACKOFF_MS = 30_000;
const MAX_BACKOFF_MS = 6 * 60 * 60 * 1000;
// After this many failed attempts an update moves to the dead-letter list.
const MAX_ATTEMPTS = 8;
// Upper bound on entries kept in either list; oldest entries are dropped first.
const MAX_ITEMS = 500;
/** One queued progress update awaiting (re)delivery to AniList. */
export interface AnilistQueuedUpdate {
  key: string; // dedupe identity for the update
  title: string; // human-readable show title (used in log messages)
  episode: number; // episode number to report; must be > 0
  createdAt: number; // epoch ms when first enqueued
  attemptCount: number; // delivery attempts made so far
  nextAttemptAt: number; // epoch ms when the item becomes ready again
  lastError: string | null; // most recent failure reason, if any
}
// On-disk JSON shape of the persisted queue.
interface AnilistRetryQueuePayload {
  pending?: AnilistQueuedUpdate[];
  deadLetter?: AnilistQueuedUpdate[];
}
/** Aggregate counts reported to callers for diagnostics/UI. */
export interface AnilistRetryQueueSnapshot {
  pending: number; // items still awaiting delivery
  ready: number; // subset of pending whose nextAttemptAt has passed
  deadLetter: number; // items that exhausted MAX_ATTEMPTS
}
/** Public API of the persistent AniList retry queue. */
export interface AnilistUpdateQueue {
  enqueue: (key: string, title: string, episode: number) => void;
  nextReady: (nowMs?: number) => AnilistQueuedUpdate | null;
  markSuccess: (key: string) => void;
  markFailure: (key: string, reason: string, nowMs?: number) => void;
  getSnapshot: (nowMs?: number) => AnilistRetryQueueSnapshot;
}
/** Make sure the directory that will contain `filePath` exists. */
function ensureDir(filePath: string): void {
  const parent = path.dirname(filePath);
  if (fs.existsSync(parent)) {
    return;
  }
  fs.mkdirSync(parent, { recursive: true });
}
/** Exponential backoff for the given attempt number, capped at MAX_BACKOFF_MS. */
function clampBackoffMs(attemptCount: number): number {
  const exponent = Math.max(0, attemptCount - 1);
  const uncapped = INITIAL_BACKOFF_MS * 2 ** exponent;
  return uncapped < MAX_BACKOFF_MS ? uncapped : MAX_BACKOFF_MS;
}
/**
 * Create a persistent retry queue for AniList progress updates.
 *
 * Pending and dead-letter lists are loaded from `filePath` at creation time
 * and rewritten after every mutation, so the queue survives restarts. All
 * file-system failures are logged and swallowed; queue methods never throw.
 *
 * @param filePath JSON file backing the queue state.
 * @param logger   Sink for info/warn/error diagnostics.
 */
export function createAnilistUpdateQueue(
  filePath: string,
  logger: {
    info: (message: string) => void;
    warn: (message: string, details?: unknown) => void;
    error: (message: string, details?: unknown) => void;
  },
): AnilistUpdateQueue {
  let pending: AnilistQueuedUpdate[] = [];
  let deadLetter: AnilistQueuedUpdate[] = [];

  // Structural validator for entries read back from disk. Shared by both
  // lists (the original inlined two identical copies of this predicate).
  const isValidQueuedUpdate = (item: unknown): item is AnilistQueuedUpdate => {
    const candidate = item as Partial<AnilistQueuedUpdate> | null | undefined;
    return (
      !!candidate &&
      typeof candidate.key === "string" &&
      typeof candidate.title === "string" &&
      typeof candidate.episode === "number" &&
      candidate.episode > 0 &&
      typeof candidate.createdAt === "number" &&
      typeof candidate.attemptCount === "number" &&
      typeof candidate.nextAttemptAt === "number" &&
      (typeof candidate.lastError === "string" || candidate.lastError === null)
    );
  };

  // Write the current in-memory state to disk; failures are logged only.
  const persist = () => {
    try {
      ensureDir(filePath);
      const payload: AnilistRetryQueuePayload = { pending, deadLetter };
      fs.writeFileSync(filePath, JSON.stringify(payload, null, 2), "utf-8");
    } catch (error) {
      logger.error("Failed to persist AniList retry queue.", error);
    }
  };

  // Restore state from disk, dropping malformed entries and truncating each
  // list to MAX_ITEMS.
  const load = () => {
    if (!fs.existsSync(filePath)) {
      return;
    }
    try {
      const raw = fs.readFileSync(filePath, "utf-8");
      const parsed = JSON.parse(raw) as AnilistRetryQueuePayload;
      const parsedPending = Array.isArray(parsed.pending) ? parsed.pending : [];
      const parsedDeadLetter = Array.isArray(parsed.deadLetter)
        ? parsed.deadLetter
        : [];
      pending = parsedPending.filter(isValidQueuedUpdate).slice(0, MAX_ITEMS);
      deadLetter = parsedDeadLetter
        .filter(isValidQueuedUpdate)
        .slice(0, MAX_ITEMS);
    } catch (error) {
      logger.error("Failed to load AniList retry queue.", error);
    }
  };

  load();

  return {
    // Add a new update unless one with the same key is already pending; when
    // full, the oldest pending entry is dropped to make room.
    // NOTE(review): an existing key keeps its original episode — this assumes
    // the key encodes the episode; confirm against callers.
    enqueue(key: string, title: string, episode: number): void {
      if (pending.some((item) => item.key === key)) {
        return;
      }
      if (pending.length >= MAX_ITEMS) {
        pending.shift();
      }
      // Single timestamp so createdAt and nextAttemptAt cannot diverge
      // (the original called Date.now() twice).
      const now = Date.now();
      pending.push({
        key,
        title,
        episode,
        createdAt: now,
        attemptCount: 0,
        nextAttemptAt: now,
        lastError: null,
      });
      persist();
      logger.info(`Queued AniList retry for "${title}" episode ${episode}.`);
    },
    // Return the first pending entry whose backoff window has elapsed.
    nextReady(nowMs: number = Date.now()): AnilistQueuedUpdate | null {
      return pending.find((item) => item.nextAttemptAt <= nowMs) ?? null;
    },
    // Remove a delivered entry; persists only when something was removed.
    markSuccess(key: string): void {
      const before = pending.length;
      pending = pending.filter((item) => item.key !== key);
      if (pending.length !== before) {
        persist();
      }
    },
    // Record a failed attempt: either reschedule with exponential backoff or,
    // after MAX_ATTEMPTS, move the entry to the dead-letter list.
    markFailure(key: string, reason: string, nowMs: number = Date.now()): void {
      const item = pending.find((candidate) => candidate.key === key);
      if (!item) {
        return;
      }
      item.attemptCount += 1;
      item.lastError = reason;
      if (item.attemptCount >= MAX_ATTEMPTS) {
        pending = pending.filter((candidate) => candidate.key !== key);
        if (deadLetter.length >= MAX_ITEMS) {
          deadLetter.shift();
        }
        deadLetter.push({
          ...item,
          nextAttemptAt: nowMs,
        });
        logger.warn("AniList retry moved to dead-letter queue.", {
          key,
          reason,
          attempts: item.attemptCount,
        });
        persist();
        return;
      }
      item.nextAttemptAt = nowMs + clampBackoffMs(item.attemptCount);
      persist();
      logger.warn("AniList retry scheduled with backoff.", {
        key,
        attemptCount: item.attemptCount,
        nextAttemptAt: item.nextAttemptAt,
        reason,
      });
    },
    // Counts for diagnostics; "ready" is computed against nowMs.
    getSnapshot(nowMs: number = Date.now()): AnilistRetryQueueSnapshot {
      const ready = pending.filter((item) => item.nextAttemptAt <= nowMs).length;
      return {
        pending: pending.length,
        ready,
        deadLetter: deadLetter.length,
      };
    },
  };
}

View File

@@ -0,0 +1,170 @@
import test from "node:test";
import assert from "node:assert/strict";
import * as childProcess from "child_process";
import {
guessAnilistMediaInfo,
updateAnilistPostWatchProgress,
} from "./anilist-updater";
/** Build a 200 JSON `Response` wrapping the given payload (fetch-mock helper). */
function createJsonResponse(payload: unknown): Response {
  const body = JSON.stringify(payload);
  const init: ResponseInit = {
    status: 200,
    headers: { "content-type": "application/json" },
  };
  return new Response(body, init);
}
// Verifies guessAnilistMediaInfo prefers guessit's JSON output (title +
// episode, source "guessit") when the external invocation succeeds.
test("guessAnilistMediaInfo uses guessit output when available", async () => {
  const originalExecFile = childProcess.execFile;
  // Monkey-patch execFile to simulate guessit emitting parsed metadata.
  // NOTE(review): assigning to a property of a namespace import only works
  // when this compiles to CommonJS; under native ESM the namespace object is
  // frozen — confirm the build's module target.
  (
    childProcess as unknown as {
      execFile: typeof childProcess.execFile;
    }
  ).execFile = ((...args: unknown[]) => {
    // execFile's completion callback is always the last argument.
    const callback = args[args.length - 1];
    const cb = typeof callback === "function"
      ? (callback as (error: Error | null, stdout: string, stderr: string) => void)
      : null;
    cb?.(null, JSON.stringify({ title: "Guessit Title", episode: 7 }), "");
    return {} as childProcess.ChildProcess;
  }) as typeof childProcess.execFile;
  try {
    const result = await guessAnilistMediaInfo("/tmp/demo.mkv", null);
    assert.deepEqual(result, {
      title: "Guessit Title",
      episode: 7,
      source: "guessit",
    });
  } finally {
    // Always restore the real execFile so later tests are unaffected.
    (
      childProcess as unknown as {
        execFile: typeof childProcess.execFile;
      }
    ).execFile = originalExecFile;
  }
});
// When spawning guessit fails, the updater should fall back to its built-in
// filename parser (here extracting title and episode from an SxxEyy name,
// source "fallback").
test("guessAnilistMediaInfo falls back to parser when guessit fails", async () => {
  const originalExecFile = childProcess.execFile;
  // Monkey-patch execFile to simulate the guessit binary being absent.
  (
    childProcess as unknown as {
      execFile: typeof childProcess.execFile;
    }
  ).execFile = ((...args: unknown[]) => {
    // execFile's completion callback is always the last argument.
    const callback = args[args.length - 1];
    const cb = typeof callback === "function"
      ? (callback as (error: Error | null, stdout: string, stderr: string) => void)
      : null;
    cb?.(new Error("guessit not found"), "", "");
    return {} as childProcess.ChildProcess;
  }) as typeof childProcess.execFile;
  try {
    const result = await guessAnilistMediaInfo(
      "/tmp/My Anime S01E03.mkv",
      null,
    );
    assert.deepEqual(result, {
      title: "My Anime",
      episode: 3,
      source: "fallback",
    });
  } finally {
    // Restore the real execFile for subsequent tests.
    (
      childProcess as unknown as {
        execFile: typeof childProcess.execFile;
      }
    ).execFile = originalExecFile;
  }
});
// Happy path: the search finds the media, the stored list entry is behind
// (progress 2 < watched episode 3), and the final call saves progress 3.
test("updateAnilistPostWatchProgress updates progress when behind", async () => {
  const originalFetch = globalThis.fetch;
  let call = 0;
  // Stub fetch with a scripted sequence of GraphQL responses, keyed by call
  // order: 1) media search (Page.media), 2) current list entry
  // (Media.mediaListEntry), 3) SaveMediaListEntry mutation result.
  globalThis.fetch = (async () => {
    call += 1;
    if (call === 1) {
      return createJsonResponse({
        data: {
          Page: {
            media: [
              {
                id: 11,
                episodes: 24,
                title: { english: "Demo Show", romaji: "Demo Show" },
              },
            ],
          },
        },
      });
    }
    if (call === 2) {
      return createJsonResponse({
        data: {
          Media: {
            id: 11,
            mediaListEntry: { progress: 2, status: "CURRENT" },
          },
        },
      });
    }
    return createJsonResponse({
      data: { SaveMediaListEntry: { progress: 3, status: "CURRENT" } },
    });
  }) as typeof fetch;
  try {
    const result = await updateAnilistPostWatchProgress("token", "Demo Show", 3);
    assert.equal(result.status, "updated");
    assert.match(result.message, /episode 3/i);
  } finally {
    // Restore the real fetch for subsequent tests.
    globalThis.fetch = originalFetch;
  }
});
// When the stored progress (12) already meets or exceeds the watched episode
// (10), the updater reports "skipped" and never issues a save mutation.
test("updateAnilistPostWatchProgress skips when progress already reached", async () => {
  const originalFetch = globalThis.fetch;
  let call = 0;
  globalThis.fetch = (async () => {
    call += 1;
    if (call === 1) {
      // Search response: a single 12-episode match.
      return createJsonResponse({
        data: {
          Page: {
            media: [{ id: 22, episodes: 12, title: { english: "Skip Show" } }],
          },
        },
      });
    }
    // List-entry lookup: progress already at 12.
    return createJsonResponse({
      data: {
        Media: { id: 22, mediaListEntry: { progress: 12, status: "CURRENT" } },
      },
    });
  }) as typeof fetch;
  try {
    const result = await updateAnilistPostWatchProgress("token", "Skip Show", 10);
    assert.equal(result.status, "skipped");
    assert.match(result.message, /already at episode/i);
  } finally {
    // Restore the real fetch for subsequent tests.
    globalThis.fetch = originalFetch;
  }
});
// A GraphQL error payload on the initial search should surface as an "error"
// result whose message mentions the failed search.
test("updateAnilistPostWatchProgress returns error when search fails", async () => {
  const originalFetch = globalThis.fetch;
  // Every fetch call returns a GraphQL error body.
  globalThis.fetch = (async () =>
    createJsonResponse({
      errors: [{ message: "bad request" }],
    })) as typeof fetch;
  try {
    const result = await updateAnilistPostWatchProgress("token", "Bad", 1);
    assert.equal(result.status, "error");
    assert.match(result.message, /search failed/i);
  } finally {
    // Restore the real fetch for subsequent tests.
    globalThis.fetch = originalFetch;
  }
});