feat: PrivateHD watcher ve item tabanli scraper entegrasyonunu ekle

This commit is contained in:
2026-03-13 02:08:17 +03:00
parent baad2b3e96
commit bf278ad786
9 changed files with 355 additions and 93 deletions

View File

@@ -98,7 +98,7 @@ export interface TimerSummary {
updatedAt: string;
}
export type WatcherTracker = "happyfappy";
export type WatcherTracker = string;
export type WatcherItemStatus =
| "bookmarked"
@@ -113,6 +113,7 @@ export interface Watcher {
id: string;
tracker: WatcherTracker;
trackerLabel: string;
wishlistUrl?: string;
category?: string;
cookieEncrypted: string;
cookieHint: string;
@@ -137,6 +138,8 @@ export interface WatcherItem {
pageUrl: string;
title: string;
imageUrl?: string;
downloadUrl?: string;
removeToken?: string;
status: WatcherItemStatus;
statusLabel: string;
qbitHash?: string;

View File

@@ -7,6 +7,12 @@ export const trackerRegistry: TrackerDefinition[] = [
cliSiteKey: "happyfappy",
supportsRemoveBookmark: true,
},
{
key: "privatehd",
label: "PrivateHD",
cliSiteKey: "privatehd",
supportsRemoveBookmark: true,
},
];
export const getTrackerDefinition = (key: string) => {

View File

@@ -1,4 +1,5 @@
import { Router } from "express";
import crypto from "node:crypto";
import { z } from "zod";
import {
createWatcher,
@@ -20,6 +21,7 @@ const createWatcherSchema = z.object({
cookie: z.string().min(1),
intervalMinutes: z.number().int().min(1).max(24 * 60),
category: z.string().optional(),
wishlistUrl: z.string().optional(),
enabled: z.boolean().optional(),
});
@@ -27,11 +29,12 @@ const updateWatcherSchema = z.object({
cookie: z.string().min(1).optional(),
intervalMinutes: z.number().int().min(1).max(24 * 60).optional(),
category: z.string().optional(),
wishlistUrl: z.string().optional(),
enabled: z.boolean().optional(),
});
router.get("/trackers", (_req, res) => {
return res.json(listTrackers());
router.get("/trackers", async (_req, res) => {
return res.json(await listTrackers());
});
router.get("/categories", async (_req, res) => {
@@ -105,8 +108,18 @@ router.get("/image", async (req, res) => {
}
try {
const image = await fetchWatcherImage(watcherId, imageUrl);
const etag = `"${crypto
.createHash("sha1")
.update(`${watcherId}:${imageUrl}:${image.data.length}`)
.digest("hex")}"`;
if (req.headers["if-none-match"] === etag) {
res.status(304);
return res.end();
}
res.setHeader("Content-Type", image.contentType);
res.setHeader("Cache-Control", "private, max-age=300");
res.setHeader("Cache-Control", "private, max-age=1800, immutable");
res.setHeader("ETag", etag);
res.setHeader("Vary", "Accept-Encoding");
return res.send(image.data);
} catch (error) {
return res.status(400).json({ error: error instanceof Error ? error.message : "Failed to fetch image" });

View File

@@ -4,7 +4,7 @@ import path from "node:path";
import axios from "axios";
import { config } from "../config";
import { logger } from "../utils/logger";
import { BookmarkRecord, ScraperRunPaths } from "./watcher.types";
import { BookmarkRecord, ScraperRunPaths, TrackerDefinition } from "./watcher.types";
const ensureDir = async (target: string) => {
await fs.mkdir(target, { recursive: true });
@@ -65,13 +65,25 @@ const request = async <T>(method: "GET" | "POST", url: string, body?: unknown) =
}
};
/**
 * Fetches the tracker catalogue from the scraper service and adapts each
 * remote entry to the local TrackerDefinition shape. The remote key doubles
 * as the CLI site key, and every remote tracker is flagged as supporting
 * bookmark removal.
 */
export const listScraperTrackers = async () => {
  const { items } = await request<{ items: Array<{ key: string; label: string }> }>(
    "GET",
    "/trackers"
  );
  return items.map(({ key, label }): TrackerDefinition => ({
    key,
    label,
    cliSiteKey: key,
    supportsRemoveBookmark: true,
  }));
};
export const runBookmarkFetch = async (
trackerSiteKey: string,
cookie: string
cookie: string,
wishlistUrl?: string
) => {
const response = await request<{ items: BookmarkRecord[] }>("POST", "/bookmarks", {
tracker: trackerSiteKey,
cookie,
wishlistUrl,
});
return response.items;
};
@@ -79,20 +91,35 @@ export const runBookmarkFetch = async (
export const runTorrentDownload = async (
trackerSiteKey: string,
cookie: string,
detailUrl: string,
outputDir: string
bookmark: BookmarkRecord,
outputDir: string,
wishlistUrl?: string
) => {
const response = await request<{ filename: string; contentBase64: string }>("POST", "/download", {
tracker: trackerSiteKey,
cookie,
url: detailUrl,
removeBookmark: true,
item: bookmark,
wishlistUrl,
});
const targetPath = path.join(outputDir, response.filename);
await fs.writeFile(targetPath, Buffer.from(response.contentBase64, "base64"));
return targetPath;
};
/**
 * Asks the scraper service to delete a bookmark on the remote tracker.
 *
 * @param trackerSiteKey CLI site key identifying the tracker.
 * @param cookie Session cookie used to authenticate against the tracker.
 * @param bookmark Bookmark record identifying the item to remove.
 * @param wishlistUrl Optional wishlist page URL forwarded to the scraper.
 */
export const runBookmarkRemove = async (
  trackerSiteKey: string,
  cookie: string,
  bookmark: BookmarkRecord,
  wishlistUrl?: string
) => {
  const payload = {
    tracker: trackerSiteKey,
    cookie,
    item: bookmark,
    wishlistUrl,
  };
  await request<{ ok: true }>("POST", "/remove-bookmark", payload);
};
export const cleanupRunPaths = async (paths: ScraperRunPaths) => {
try {
await fs.rm(paths.runDir, { recursive: true, force: true });

View File

@@ -10,14 +10,27 @@ import { appendAuditLog, logger } from "../utils/logger";
import { emitWatcherItems, emitWatchersList, emitWatcherSummary } from "../realtime/emitter";
import { buildCookieHint, decryptWatcherCookie, encryptWatcherCookie } from "./watcher.crypto";
import { getTrackerDefinition, trackerRegistry } from "./watcher.registry";
import { cleanupRunPaths, createScraperRunPaths, runBookmarkFetch, runTorrentDownload } from "./watcher.scraper";
import {
cleanupRunPaths,
createScraperRunPaths,
listScraperTrackers,
runBookmarkFetch,
runBookmarkRemove,
runTorrentDownload,
} from "./watcher.scraper";
import { BookmarkRecord, EnrichedWatcherItem, WatcherListItem, WatcherSummaryResponse } from "./watcher.types";
const MAX_WATCHER_ITEMS = 500;
const MAX_WATCHER_RUNS = 200;
const QBIT_VERIFY_ATTEMPTS = 5;
const QBIT_VERIFY_DELAY_MS = 2500;
const WATCHER_IMAGE_CACHE_TTL_MS = 30 * 60_000;
const WATCHER_IMAGE_CACHE_LIMIT = 200;
const activeWatcherRuns = new Set<string>();
const watcherImageCache = new Map<
string,
{ contentType: string; data: Buffer; cachedAt: number }
>();
const delay = (ms: number) => new Promise((resolve) => setTimeout(resolve, ms));
@@ -49,12 +62,37 @@ const statusLabel = (status: WatcherItem["status"]) => {
const deriveTrackerTorrentId = (pageUrl: string) => {
try {
const url = new URL(pageUrl);
return url.searchParams.get("id") ?? undefined;
const byQuery = url.searchParams.get("id");
if (byQuery) {
return byQuery;
}
const match = url.pathname.match(/\/torrent\/(\d+)|\/torrents\.php\/(\d+)/i);
return match?.[1] ?? match?.[2] ?? undefined;
} catch {
return undefined;
}
};
/**
 * Parses a human-readable size string (e.g. "1.5 GB", "512 MB") into bytes.
 *
 * Accepts a decimal comma ("1,5 GB") and US-style thousands separators
 * ("1,234.5 MB"). Returns `undefined` for empty, unparsable, or unknown-unit
 * input rather than throwing, since tracker markup is best-effort.
 * Units use binary multiples (1 KB = 1024 B).
 */
const parseSizeText = (value?: string | null) => {
  if (!value?.trim()) {
    return undefined;
  }
  const match = value.trim().match(/^([\d.,]+)\s*(B|KB|MB|GB|TB)$/i);
  if (!match) {
    return undefined;
  }
  const rawNumber = match[1];
  const commaCount = (rawNumber.match(/,/g) ?? []).length;
  // Disambiguate separators: commas alongside a dot (or repeated commas)
  // are thousands separators and are stripped; a single lone comma is a
  // decimal comma. (A bare `replace(",", ".")` would turn "1,234.5" into
  // the unparsable "1.234.5".)
  const normalized =
    rawNumber.includes(".") || commaCount > 1
      ? rawNumber.replace(/,/g, "")
      : rawNumber.replace(",", ".");
  const numeric = Number(normalized);
  if (!Number.isFinite(numeric)) {
    return undefined;
  }
  const units = ["B", "KB", "MB", "GB", "TB"] as const;
  const power = units.indexOf(match[2].toUpperCase() as (typeof units)[number]);
  if (power < 0) {
    return undefined;
  }
  // Round to whole bytes after scaling by the unit's binary power.
  return Math.round(numeric * 1024 ** power);
};
const normalizeImageUrl = (pageUrl: string, imageUrl?: string | null) => {
if (!imageUrl?.trim()) {
return undefined;
@@ -66,6 +104,23 @@ const normalizeImageUrl = (pageUrl: string, imageUrl?: string | null) => {
}
};
/**
 * Evicts stale entries from the in-memory watcher image cache and enforces
 * its size cap. Entries older than the TTL are dropped first; if the cache
 * is still over the limit, the oldest entries (Map preserves insertion
 * order, so the first key is the oldest) are removed until it fits.
 */
const pruneWatcherImageCache = () => {
  const expiresBefore = Date.now() - WATCHER_IMAGE_CACHE_TTL_MS;
  for (const [cacheKey, entry] of watcherImageCache) {
    if (entry.cachedAt < expiresBefore) {
      watcherImageCache.delete(cacheKey);
    }
  }
  while (watcherImageCache.size > WATCHER_IMAGE_CACHE_LIMIT) {
    const oldestKey = watcherImageCache.keys().next().value;
    if (!oldestKey) {
      break;
    }
    watcherImageCache.delete(oldestKey);
  }
};
const domainMatches = (targetHost: string, cookieDomain: string) => {
const normalizedCookieDomain = cookieDomain.replace(/^\./, "").toLowerCase();
const normalizedTargetHost = targetHost.toLowerCase();
@@ -258,7 +313,14 @@ const persistWatcherProgress = async (payload: {
await refreshWatcherSummary();
};
export const listTrackers = () => trackerRegistry;
/**
 * Returns the available watcher trackers. Prefers the live list exposed by
 * the scraper service; if that call fails for any reason, logs a warning and
 * serves the statically bundled registry instead.
 */
export const listTrackers = async () =>
  listScraperTrackers().catch((error) => {
    logger.warn({ error }, "Falling back to local watcher tracker registry");
    return trackerRegistry;
  });
export const listWatchers = async () => {
const db = await readDb();
@@ -286,6 +348,7 @@ export const createWatcher = async (payload: {
cookie: string;
intervalMinutes: number;
category?: string;
wishlistUrl?: string;
enabled?: boolean;
}) => {
const tracker = getTrackerDefinition(payload.tracker);
@@ -295,11 +358,15 @@ export const createWatcher = async (payload: {
if (!payload.cookie.trim()) {
throw new Error("Cookie is required");
}
if (tracker.key === "privatehd" && !payload.wishlistUrl?.trim()) {
throw new Error("PrivateHD icin wishlist URL zorunludur.");
}
const db = await readDb();
const watcher: Watcher = {
id: randomUUID(),
tracker: tracker.key,
trackerLabel: tracker.label,
wishlistUrl: payload.wishlistUrl?.trim() || undefined,
category: payload.category?.trim() || undefined,
cookieEncrypted: encryptWatcherCookie(payload.cookie.trim()),
cookieHint: buildCookieHint(payload.cookie),
@@ -320,7 +387,13 @@ export const createWatcher = async (payload: {
export const updateWatcher = async (
watcherId: string,
payload: { cookie?: string; intervalMinutes?: number; category?: string; enabled?: boolean }
payload: {
cookie?: string;
intervalMinutes?: number;
category?: string;
wishlistUrl?: string;
enabled?: boolean;
}
) => {
const updated = await persistWatcher(watcherId, (watcher) => {
const next: Watcher = {
@@ -337,10 +410,16 @@ export const updateWatcher = async (
if (typeof payload.category === "string") {
next.category = payload.category.trim() || undefined;
}
if (typeof payload.wishlistUrl === "string") {
next.wishlistUrl = payload.wishlistUrl.trim() || undefined;
}
if (payload.cookie && payload.cookie.trim()) {
next.cookieEncrypted = encryptWatcherCookie(payload.cookie.trim());
next.cookieHint = buildCookieHint(payload.cookie);
}
if (next.tracker === "privatehd" && !next.wishlistUrl?.trim()) {
throw new Error("PrivateHD icin wishlist URL zorunludur.");
}
return next;
});
emitWatchersList((await readDb()).watchers?.map(toListItem) ?? []);
@@ -367,9 +446,28 @@ export const getWatcherSummary = async (): Promise<WatcherSummaryResponse> => {
};
export const getWatcherItems = async () => {
const db = await readDb();
const torrents = await getQbitClient().getTorrentsInfo().catch(() => []);
return (db.watcherItems ?? []).map((item) => mergeQbitState(item, torrents));
const db = await readDb();
const torrentHashes = new Set(
torrents.map((torrent) => torrent.hash.toLowerCase()).filter(Boolean)
);
const currentItems = db.watcherItems ?? [];
const nextItems = currentItems.filter((item) => {
if (!item.importedAt && !item.qbitHash) {
return true;
}
if (!item.qbitHash) {
return false;
}
return torrentHashes.has(item.qbitHash.toLowerCase());
});
if (nextItems.length !== currentItems.length) {
db.watcherItems = nextItems;
await writeDb(db);
}
return nextItems.map((item) => mergeQbitState(item, torrents));
};
export const getQbitCategories = async () => {
@@ -387,6 +485,13 @@ export const getQbitCategories = async () => {
};
export const fetchWatcherImage = async (watcherId: string, imageUrl: string) => {
pruneWatcherImageCache();
const cacheKey = `${watcherId}:${imageUrl}`;
const cached = watcherImageCache.get(cacheKey);
if (cached && Date.now() - cached.cachedAt < WATCHER_IMAGE_CACHE_TTL_MS) {
return cached;
}
const db = await readDb();
const watcher = (db.watchers ?? []).find((entry) => entry.id === watcherId);
if (!watcher) {
@@ -400,15 +505,22 @@ export const fetchWatcherImage = async (watcherId: string, imageUrl: string) =>
responseType: "arraybuffer",
headers: {
...(cookie ? { Cookie: cookie } : {}),
Referer: "https://www.happyfappy.net/",
Referer:
watcher.wishlistUrl ||
(watcher.tracker === "privatehd" ? "https://privatehd.to/" : "https://www.happyfappy.net/"),
"User-Agent":
"Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/145.0.0.0 Safari/537.36",
},
});
return {
const image = {
contentType: response.headers["content-type"] || "image/jpeg",
data: Buffer.from(response.data),
};
watcherImageCache.set(cacheKey, {
...image,
cachedAt: Date.now(),
});
return image;
};
const recordRun = async (run: WatcherRun) => {
@@ -432,6 +544,44 @@ const findExistingItem = (items: WatcherItem[], watcherId: string, sourceKey: st
return items.find((item) => item.watcherId === watcherId && item.sourceKey === sourceKey);
};
/**
 * Drops stored items of the given watcher that are no longer relevant.
 *
 * An item is kept when it belongs to a different watcher, is still present
 * in the freshly fetched bookmark list (matched by page URL / sourceKey),
 * or maps to a torrent currently known to qBittorrent. When anything was
 * removed, the database is rewritten and the updated item list (merged with
 * live qBittorrent state) is emitted to clients.
 *
 * @returns The surviving watcher items (all watchers, original order).
 */
const pruneWatcherItems = async (
  watcherId: string,
  bookmarks: BookmarkRecord[],
  torrents: QbitTorrentInfo[]
) => {
  const activeSourceKeys = new Set(bookmarks.map((entry) => entry.pageURL));
  const knownHashes = new Set(
    torrents.map((entry) => entry.hash.toLowerCase()).filter(Boolean)
  );
  const db = await readDb();
  const previous = db.watcherItems ?? [];
  const retained = previous.filter(
    (item) =>
      item.watcherId !== watcherId ||
      activeSourceKeys.has(item.sourceKey) ||
      Boolean(item.qbitHash && knownHashes.has(item.qbitHash.toLowerCase()))
  );
  // Only touch persistence and realtime listeners when something changed.
  if (retained.length !== previous.length) {
    db.watcherItems = retained;
    await writeDb(db);
    emitWatcherItems(retained.map((item) => mergeQbitState(item, torrents)));
  }
  return retained;
};
const verifyQbitImport = async (
torrentPath: string,
bookmark: BookmarkRecord,
@@ -501,9 +651,14 @@ const processBookmark = async (watcher: Watcher, bookmark: BookmarkRecord) => {
pageUrl: bookmark.pageURL,
title: bookmark.title,
imageUrl: normalizeImageUrl(bookmark.pageURL, bookmark.backgroundImage),
downloadUrl: bookmark.downloadURL ?? undefined,
removeToken: bookmark.removeToken ?? undefined,
status: "downloading_torrent",
statusLabel: statusLabel("downloading_torrent"),
trackerTorrentId: deriveTrackerTorrentId(bookmark.pageURL),
sizeBytes: parseSizeText(bookmark.size),
seeders: bookmark.seeders ?? undefined,
leechers: bookmark.leechers ?? undefined,
seenAt,
lastSyncAt: seenAt,
};
@@ -512,8 +667,9 @@ const processBookmark = async (watcher: Watcher, bookmark: BookmarkRecord) => {
const torrentPath = await runTorrentDownload(
tracker.cliSiteKey,
cookie,
bookmark.pageURL,
runPaths.torrentDir
bookmark,
runPaths.torrentDir,
watcher.wishlistUrl
);
item = {
...item,
@@ -543,6 +699,22 @@ const processBookmark = async (watcher: Watcher, bookmark: BookmarkRecord) => {
leechers: qbitResult.torrent.num_leechs,
lastSyncAt: nowIso(),
};
if (tracker.supportsRemoveBookmark) {
try {
await runBookmarkRemove(
tracker.cliSiteKey,
cookie,
bookmark,
watcher.wishlistUrl
);
} catch (error) {
item = {
...item,
errorMessage: error instanceof Error ? error.message : "Bookmark silinemedi",
lastSyncAt: nowIso(),
};
}
}
return item;
} finally {
await cleanupRunPaths(runPaths);
@@ -592,13 +764,18 @@ export const runWatcherById = async (watcherId: string) => {
const cookie = decryptWatcherCookie(watcher.cookieEncrypted);
const records = await runBookmarkFetch(
tracker.cliSiteKey,
cookie
cookie,
watcher.wishlistUrl
);
logger.info({ watcherId, count: records.length }, "Watcher bookmarks fetched");
newBookmarks = records.length;
const freshDb = await readDb();
const items = freshDb.watcherItems ?? [];
const processedSourceKeys = new Set(items.map((item) => item.sourceKey));
const torrents = await getQbitClient().getTorrentsInfo().catch(() => []);
const items = await pruneWatcherItems(watcher.id, records, torrents);
const processedSourceKeys = new Set(
items
.filter((item) => item.watcherId === watcher.id)
.map((item) => item.sourceKey)
);
for (const bookmark of records) {
if (processedSourceKeys.has(bookmark.pageURL)) {
logger.info({ watcherId, sourceKey: bookmark.pageURL }, "Watcher bookmark skipped due to dedupe set");
@@ -606,6 +783,13 @@ export const runWatcherById = async (watcherId: string) => {
}
const existing = findExistingItem(items, watcher.id, bookmark.pageURL);
if (existing) {
existing.title = bookmark.title;
existing.imageUrl = normalizeImageUrl(bookmark.pageURL, bookmark.backgroundImage) ?? existing.imageUrl;
existing.downloadUrl = bookmark.downloadURL ?? existing.downloadUrl;
existing.removeToken = bookmark.removeToken ?? existing.removeToken;
existing.sizeBytes = parseSizeText(bookmark.size) ?? existing.sizeBytes;
existing.seeders = bookmark.seeders ?? existing.seeders;
existing.leechers = bookmark.leechers ?? existing.leechers;
existing.lastSyncAt = nowIso();
await persistWatcherProgress({
watcherId: watcher.id,
@@ -653,9 +837,14 @@ export const runWatcherById = async (watcherId: string) => {
pageUrl: bookmark.pageURL,
title: bookmark.title,
imageUrl: normalizeImageUrl(bookmark.pageURL, bookmark.backgroundImage),
downloadUrl: bookmark.downloadURL ?? undefined,
removeToken: bookmark.removeToken ?? undefined,
status: "failed",
statusLabel: statusLabel("failed"),
trackerTorrentId: deriveTrackerTorrentId(bookmark.pageURL),
sizeBytes: parseSizeText(bookmark.size),
seeders: bookmark.seeders ?? undefined,
leechers: bookmark.leechers ?? undefined,
seenAt: nowIso(),
lastSyncAt: nowIso(),
errorMessage: error instanceof Error ? error.message : "Unknown watcher error",

View File

@@ -12,6 +12,11 @@ export interface BookmarkRecord {
isVR?: boolean;
title: string;
backgroundImage?: string | null;
downloadURL?: string | null;
removeToken?: string | null;
size?: string | null;
seeders?: number | null;
leechers?: number | null;
}
export interface ScraperRunPaths {