feat: PrivateHD watcher ve item tabanlı scraper entegrasyonunu ekle

This commit is contained in:
2026-03-13 02:08:17 +03:00
parent baad2b3e96
commit bf278ad786
9 changed files with 355 additions and 93 deletions

View File

@@ -98,7 +98,7 @@ export interface TimerSummary {
updatedAt: string; updatedAt: string;
} }
export type WatcherTracker = "happyfappy"; export type WatcherTracker = string;
export type WatcherItemStatus = export type WatcherItemStatus =
| "bookmarked" | "bookmarked"
@@ -113,6 +113,7 @@ export interface Watcher {
id: string; id: string;
tracker: WatcherTracker; tracker: WatcherTracker;
trackerLabel: string; trackerLabel: string;
wishlistUrl?: string;
category?: string; category?: string;
cookieEncrypted: string; cookieEncrypted: string;
cookieHint: string; cookieHint: string;
@@ -137,6 +138,8 @@ export interface WatcherItem {
pageUrl: string; pageUrl: string;
title: string; title: string;
imageUrl?: string; imageUrl?: string;
downloadUrl?: string;
removeToken?: string;
status: WatcherItemStatus; status: WatcherItemStatus;
statusLabel: string; statusLabel: string;
qbitHash?: string; qbitHash?: string;

View File

@@ -7,6 +7,12 @@ export const trackerRegistry: TrackerDefinition[] = [
cliSiteKey: "happyfappy", cliSiteKey: "happyfappy",
supportsRemoveBookmark: true, supportsRemoveBookmark: true,
}, },
{
key: "privatehd",
label: "PrivateHD",
cliSiteKey: "privatehd",
supportsRemoveBookmark: true,
},
]; ];
export const getTrackerDefinition = (key: string) => { export const getTrackerDefinition = (key: string) => {

View File

@@ -1,4 +1,5 @@
import { Router } from "express"; import { Router } from "express";
import crypto from "node:crypto";
import { z } from "zod"; import { z } from "zod";
import { import {
createWatcher, createWatcher,
@@ -20,6 +21,7 @@ const createWatcherSchema = z.object({
cookie: z.string().min(1), cookie: z.string().min(1),
intervalMinutes: z.number().int().min(1).max(24 * 60), intervalMinutes: z.number().int().min(1).max(24 * 60),
category: z.string().optional(), category: z.string().optional(),
wishlistUrl: z.string().optional(),
enabled: z.boolean().optional(), enabled: z.boolean().optional(),
}); });
@@ -27,11 +29,12 @@ const updateWatcherSchema = z.object({
cookie: z.string().min(1).optional(), cookie: z.string().min(1).optional(),
intervalMinutes: z.number().int().min(1).max(24 * 60).optional(), intervalMinutes: z.number().int().min(1).max(24 * 60).optional(),
category: z.string().optional(), category: z.string().optional(),
wishlistUrl: z.string().optional(),
enabled: z.boolean().optional(), enabled: z.boolean().optional(),
}); });
router.get("/trackers", (_req, res) => { router.get("/trackers", async (_req, res) => {
return res.json(listTrackers()); return res.json(await listTrackers());
}); });
router.get("/categories", async (_req, res) => { router.get("/categories", async (_req, res) => {
@@ -105,8 +108,18 @@ router.get("/image", async (req, res) => {
} }
try { try {
const image = await fetchWatcherImage(watcherId, imageUrl); const image = await fetchWatcherImage(watcherId, imageUrl);
const etag = `"${crypto
.createHash("sha1")
.update(`${watcherId}:${imageUrl}:${image.data.length}`)
.digest("hex")}"`;
if (req.headers["if-none-match"] === etag) {
res.status(304);
return res.end();
}
res.setHeader("Content-Type", image.contentType); res.setHeader("Content-Type", image.contentType);
res.setHeader("Cache-Control", "private, max-age=300"); res.setHeader("Cache-Control", "private, max-age=1800, immutable");
res.setHeader("ETag", etag);
res.setHeader("Vary", "Accept-Encoding");
return res.send(image.data); return res.send(image.data);
} catch (error) { } catch (error) {
return res.status(400).json({ error: error instanceof Error ? error.message : "Failed to fetch image" }); return res.status(400).json({ error: error instanceof Error ? error.message : "Failed to fetch image" });

View File

@@ -4,7 +4,7 @@ import path from "node:path";
import axios from "axios"; import axios from "axios";
import { config } from "../config"; import { config } from "../config";
import { logger } from "../utils/logger"; import { logger } from "../utils/logger";
import { BookmarkRecord, ScraperRunPaths } from "./watcher.types"; import { BookmarkRecord, ScraperRunPaths, TrackerDefinition } from "./watcher.types";
const ensureDir = async (target: string) => { const ensureDir = async (target: string) => {
await fs.mkdir(target, { recursive: true }); await fs.mkdir(target, { recursive: true });
@@ -65,13 +65,25 @@ const request = async <T>(method: "GET" | "POST", url: string, body?: unknown) =
} }
}; };
export const listScraperTrackers = async () => {
const response = await request<{ items: Array<{ key: string; label: string }> }>("GET", "/trackers");
return response.items.map<TrackerDefinition>((item) => ({
key: item.key,
label: item.label,
cliSiteKey: item.key,
supportsRemoveBookmark: true,
}));
};
export const runBookmarkFetch = async ( export const runBookmarkFetch = async (
trackerSiteKey: string, trackerSiteKey: string,
cookie: string cookie: string,
wishlistUrl?: string
) => { ) => {
const response = await request<{ items: BookmarkRecord[] }>("POST", "/bookmarks", { const response = await request<{ items: BookmarkRecord[] }>("POST", "/bookmarks", {
tracker: trackerSiteKey, tracker: trackerSiteKey,
cookie, cookie,
wishlistUrl,
}); });
return response.items; return response.items;
}; };
@@ -79,20 +91,35 @@ export const runBookmarkFetch = async (
export const runTorrentDownload = async ( export const runTorrentDownload = async (
trackerSiteKey: string, trackerSiteKey: string,
cookie: string, cookie: string,
detailUrl: string, bookmark: BookmarkRecord,
outputDir: string outputDir: string,
wishlistUrl?: string
) => { ) => {
const response = await request<{ filename: string; contentBase64: string }>("POST", "/download", { const response = await request<{ filename: string; contentBase64: string }>("POST", "/download", {
tracker: trackerSiteKey, tracker: trackerSiteKey,
cookie, cookie,
url: detailUrl, item: bookmark,
removeBookmark: true, wishlistUrl,
}); });
const targetPath = path.join(outputDir, response.filename); const targetPath = path.join(outputDir, response.filename);
await fs.writeFile(targetPath, Buffer.from(response.contentBase64, "base64")); await fs.writeFile(targetPath, Buffer.from(response.contentBase64, "base64"));
return targetPath; return targetPath;
}; };
export const runBookmarkRemove = async (
trackerSiteKey: string,
cookie: string,
bookmark: BookmarkRecord,
wishlistUrl?: string
) => {
await request<{ ok: true }>("POST", "/remove-bookmark", {
tracker: trackerSiteKey,
cookie,
item: bookmark,
wishlistUrl,
});
};
export const cleanupRunPaths = async (paths: ScraperRunPaths) => { export const cleanupRunPaths = async (paths: ScraperRunPaths) => {
try { try {
await fs.rm(paths.runDir, { recursive: true, force: true }); await fs.rm(paths.runDir, { recursive: true, force: true });

View File

@@ -10,14 +10,27 @@ import { appendAuditLog, logger } from "../utils/logger";
import { emitWatcherItems, emitWatchersList, emitWatcherSummary } from "../realtime/emitter"; import { emitWatcherItems, emitWatchersList, emitWatcherSummary } from "../realtime/emitter";
import { buildCookieHint, decryptWatcherCookie, encryptWatcherCookie } from "./watcher.crypto"; import { buildCookieHint, decryptWatcherCookie, encryptWatcherCookie } from "./watcher.crypto";
import { getTrackerDefinition, trackerRegistry } from "./watcher.registry"; import { getTrackerDefinition, trackerRegistry } from "./watcher.registry";
import { cleanupRunPaths, createScraperRunPaths, runBookmarkFetch, runTorrentDownload } from "./watcher.scraper"; import {
cleanupRunPaths,
createScraperRunPaths,
listScraperTrackers,
runBookmarkFetch,
runBookmarkRemove,
runTorrentDownload,
} from "./watcher.scraper";
import { BookmarkRecord, EnrichedWatcherItem, WatcherListItem, WatcherSummaryResponse } from "./watcher.types"; import { BookmarkRecord, EnrichedWatcherItem, WatcherListItem, WatcherSummaryResponse } from "./watcher.types";
const MAX_WATCHER_ITEMS = 500; const MAX_WATCHER_ITEMS = 500;
const MAX_WATCHER_RUNS = 200; const MAX_WATCHER_RUNS = 200;
const QBIT_VERIFY_ATTEMPTS = 5; const QBIT_VERIFY_ATTEMPTS = 5;
const QBIT_VERIFY_DELAY_MS = 2500; const QBIT_VERIFY_DELAY_MS = 2500;
const WATCHER_IMAGE_CACHE_TTL_MS = 30 * 60_000;
const WATCHER_IMAGE_CACHE_LIMIT = 200;
const activeWatcherRuns = new Set<string>(); const activeWatcherRuns = new Set<string>();
const watcherImageCache = new Map<
string,
{ contentType: string; data: Buffer; cachedAt: number }
>();
const delay = (ms: number) => new Promise((resolve) => setTimeout(resolve, ms)); const delay = (ms: number) => new Promise((resolve) => setTimeout(resolve, ms));
@@ -49,12 +62,37 @@ const statusLabel = (status: WatcherItem["status"]) => {
const deriveTrackerTorrentId = (pageUrl: string) => { const deriveTrackerTorrentId = (pageUrl: string) => {
try { try {
const url = new URL(pageUrl); const url = new URL(pageUrl);
return url.searchParams.get("id") ?? undefined; const byQuery = url.searchParams.get("id");
if (byQuery) {
return byQuery;
}
const match = url.pathname.match(/\/torrent\/(\d+)|\/torrents\.php\/(\d+)/i);
return match?.[1] ?? match?.[2] ?? undefined;
} catch { } catch {
return undefined; return undefined;
} }
}; };
const parseSizeText = (value?: string | null) => {
if (!value?.trim()) {
return undefined;
}
const match = value.trim().match(/^([\d.,]+)\s*(B|KB|MB|GB|TB)$/i);
if (!match) {
return undefined;
}
const numeric = Number(match[1].replace(",", "."));
if (!Number.isFinite(numeric)) {
return undefined;
}
const units = ["B", "KB", "MB", "GB", "TB"] as const;
const power = units.indexOf(match[2].toUpperCase() as (typeof units)[number]);
if (power < 0) {
return undefined;
}
return Math.round(numeric * 1024 ** power);
};
const normalizeImageUrl = (pageUrl: string, imageUrl?: string | null) => { const normalizeImageUrl = (pageUrl: string, imageUrl?: string | null) => {
if (!imageUrl?.trim()) { if (!imageUrl?.trim()) {
return undefined; return undefined;
@@ -66,6 +104,23 @@ const normalizeImageUrl = (pageUrl: string, imageUrl?: string | null) => {
} }
}; };
const pruneWatcherImageCache = () => {
const cutoff = Date.now() - WATCHER_IMAGE_CACHE_TTL_MS;
for (const [key, value] of watcherImageCache.entries()) {
if (value.cachedAt < cutoff) {
watcherImageCache.delete(key);
}
}
while (watcherImageCache.size > WATCHER_IMAGE_CACHE_LIMIT) {
const oldest = watcherImageCache.keys().next().value;
if (!oldest) {
break;
}
watcherImageCache.delete(oldest);
}
};
const domainMatches = (targetHost: string, cookieDomain: string) => { const domainMatches = (targetHost: string, cookieDomain: string) => {
const normalizedCookieDomain = cookieDomain.replace(/^\./, "").toLowerCase(); const normalizedCookieDomain = cookieDomain.replace(/^\./, "").toLowerCase();
const normalizedTargetHost = targetHost.toLowerCase(); const normalizedTargetHost = targetHost.toLowerCase();
@@ -258,7 +313,14 @@ const persistWatcherProgress = async (payload: {
await refreshWatcherSummary(); await refreshWatcherSummary();
}; };
export const listTrackers = () => trackerRegistry; export const listTrackers = async () => {
try {
return await listScraperTrackers();
} catch (error) {
logger.warn({ error }, "Falling back to local watcher tracker registry");
return trackerRegistry;
}
};
export const listWatchers = async () => { export const listWatchers = async () => {
const db = await readDb(); const db = await readDb();
@@ -286,6 +348,7 @@ export const createWatcher = async (payload: {
cookie: string; cookie: string;
intervalMinutes: number; intervalMinutes: number;
category?: string; category?: string;
wishlistUrl?: string;
enabled?: boolean; enabled?: boolean;
}) => { }) => {
const tracker = getTrackerDefinition(payload.tracker); const tracker = getTrackerDefinition(payload.tracker);
@@ -295,11 +358,15 @@ export const createWatcher = async (payload: {
if (!payload.cookie.trim()) { if (!payload.cookie.trim()) {
throw new Error("Cookie is required"); throw new Error("Cookie is required");
} }
if (tracker.key === "privatehd" && !payload.wishlistUrl?.trim()) {
throw new Error("PrivateHD icin wishlist URL zorunludur.");
}
const db = await readDb(); const db = await readDb();
const watcher: Watcher = { const watcher: Watcher = {
id: randomUUID(), id: randomUUID(),
tracker: tracker.key, tracker: tracker.key,
trackerLabel: tracker.label, trackerLabel: tracker.label,
wishlistUrl: payload.wishlistUrl?.trim() || undefined,
category: payload.category?.trim() || undefined, category: payload.category?.trim() || undefined,
cookieEncrypted: encryptWatcherCookie(payload.cookie.trim()), cookieEncrypted: encryptWatcherCookie(payload.cookie.trim()),
cookieHint: buildCookieHint(payload.cookie), cookieHint: buildCookieHint(payload.cookie),
@@ -320,7 +387,13 @@ export const createWatcher = async (payload: {
export const updateWatcher = async ( export const updateWatcher = async (
watcherId: string, watcherId: string,
payload: { cookie?: string; intervalMinutes?: number; category?: string; enabled?: boolean } payload: {
cookie?: string;
intervalMinutes?: number;
category?: string;
wishlistUrl?: string;
enabled?: boolean;
}
) => { ) => {
const updated = await persistWatcher(watcherId, (watcher) => { const updated = await persistWatcher(watcherId, (watcher) => {
const next: Watcher = { const next: Watcher = {
@@ -337,10 +410,16 @@ export const updateWatcher = async (
if (typeof payload.category === "string") { if (typeof payload.category === "string") {
next.category = payload.category.trim() || undefined; next.category = payload.category.trim() || undefined;
} }
if (typeof payload.wishlistUrl === "string") {
next.wishlistUrl = payload.wishlistUrl.trim() || undefined;
}
if (payload.cookie && payload.cookie.trim()) { if (payload.cookie && payload.cookie.trim()) {
next.cookieEncrypted = encryptWatcherCookie(payload.cookie.trim()); next.cookieEncrypted = encryptWatcherCookie(payload.cookie.trim());
next.cookieHint = buildCookieHint(payload.cookie); next.cookieHint = buildCookieHint(payload.cookie);
} }
if (next.tracker === "privatehd" && !next.wishlistUrl?.trim()) {
throw new Error("PrivateHD icin wishlist URL zorunludur.");
}
return next; return next;
}); });
emitWatchersList((await readDb()).watchers?.map(toListItem) ?? []); emitWatchersList((await readDb()).watchers?.map(toListItem) ?? []);
@@ -367,9 +446,28 @@ export const getWatcherSummary = async (): Promise<WatcherSummaryResponse> => {
}; };
export const getWatcherItems = async () => { export const getWatcherItems = async () => {
const db = await readDb();
const torrents = await getQbitClient().getTorrentsInfo().catch(() => []); const torrents = await getQbitClient().getTorrentsInfo().catch(() => []);
return (db.watcherItems ?? []).map((item) => mergeQbitState(item, torrents)); const db = await readDb();
const torrentHashes = new Set(
torrents.map((torrent) => torrent.hash.toLowerCase()).filter(Boolean)
);
const currentItems = db.watcherItems ?? [];
const nextItems = currentItems.filter((item) => {
if (!item.importedAt && !item.qbitHash) {
return true;
}
if (!item.qbitHash) {
return false;
}
return torrentHashes.has(item.qbitHash.toLowerCase());
});
if (nextItems.length !== currentItems.length) {
db.watcherItems = nextItems;
await writeDb(db);
}
return nextItems.map((item) => mergeQbitState(item, torrents));
}; };
export const getQbitCategories = async () => { export const getQbitCategories = async () => {
@@ -387,6 +485,13 @@ export const getQbitCategories = async () => {
}; };
export const fetchWatcherImage = async (watcherId: string, imageUrl: string) => { export const fetchWatcherImage = async (watcherId: string, imageUrl: string) => {
pruneWatcherImageCache();
const cacheKey = `${watcherId}:${imageUrl}`;
const cached = watcherImageCache.get(cacheKey);
if (cached && Date.now() - cached.cachedAt < WATCHER_IMAGE_CACHE_TTL_MS) {
return cached;
}
const db = await readDb(); const db = await readDb();
const watcher = (db.watchers ?? []).find((entry) => entry.id === watcherId); const watcher = (db.watchers ?? []).find((entry) => entry.id === watcherId);
if (!watcher) { if (!watcher) {
@@ -400,15 +505,22 @@ export const fetchWatcherImage = async (watcherId: string, imageUrl: string) =>
responseType: "arraybuffer", responseType: "arraybuffer",
headers: { headers: {
...(cookie ? { Cookie: cookie } : {}), ...(cookie ? { Cookie: cookie } : {}),
Referer: "https://www.happyfappy.net/", Referer:
watcher.wishlistUrl ||
(watcher.tracker === "privatehd" ? "https://privatehd.to/" : "https://www.happyfappy.net/"),
"User-Agent": "User-Agent":
"Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/145.0.0.0 Safari/537.36", "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/145.0.0.0 Safari/537.36",
}, },
}); });
return { const image = {
contentType: response.headers["content-type"] || "image/jpeg", contentType: response.headers["content-type"] || "image/jpeg",
data: Buffer.from(response.data), data: Buffer.from(response.data),
}; };
watcherImageCache.set(cacheKey, {
...image,
cachedAt: Date.now(),
});
return image;
}; };
const recordRun = async (run: WatcherRun) => { const recordRun = async (run: WatcherRun) => {
@@ -432,6 +544,44 @@ const findExistingItem = (items: WatcherItem[], watcherId: string, sourceKey: st
return items.find((item) => item.watcherId === watcherId && item.sourceKey === sourceKey); return items.find((item) => item.watcherId === watcherId && item.sourceKey === sourceKey);
}; };
const pruneWatcherItems = async (
watcherId: string,
bookmarks: BookmarkRecord[],
torrents: QbitTorrentInfo[]
) => {
const bookmarkKeys = new Set(bookmarks.map((bookmark) => bookmark.pageURL));
const torrentHashes = new Set(
torrents.map((torrent) => torrent.hash.toLowerCase()).filter(Boolean)
);
const db = await readDb();
const before = db.watcherItems ?? [];
const nextItems = before.filter((item) => {
if (item.watcherId !== watcherId) {
return true;
}
if (bookmarkKeys.has(item.sourceKey)) {
return true;
}
if (item.qbitHash && torrentHashes.has(item.qbitHash.toLowerCase())) {
return true;
}
return false;
});
if (nextItems.length === before.length) {
return nextItems;
}
db.watcherItems = nextItems;
await writeDb(db);
emitWatcherItems(nextItems.map((item) => mergeQbitState(item, torrents)));
return nextItems;
};
const verifyQbitImport = async ( const verifyQbitImport = async (
torrentPath: string, torrentPath: string,
bookmark: BookmarkRecord, bookmark: BookmarkRecord,
@@ -501,9 +651,14 @@ const processBookmark = async (watcher: Watcher, bookmark: BookmarkRecord) => {
pageUrl: bookmark.pageURL, pageUrl: bookmark.pageURL,
title: bookmark.title, title: bookmark.title,
imageUrl: normalizeImageUrl(bookmark.pageURL, bookmark.backgroundImage), imageUrl: normalizeImageUrl(bookmark.pageURL, bookmark.backgroundImage),
downloadUrl: bookmark.downloadURL ?? undefined,
removeToken: bookmark.removeToken ?? undefined,
status: "downloading_torrent", status: "downloading_torrent",
statusLabel: statusLabel("downloading_torrent"), statusLabel: statusLabel("downloading_torrent"),
trackerTorrentId: deriveTrackerTorrentId(bookmark.pageURL), trackerTorrentId: deriveTrackerTorrentId(bookmark.pageURL),
sizeBytes: parseSizeText(bookmark.size),
seeders: bookmark.seeders ?? undefined,
leechers: bookmark.leechers ?? undefined,
seenAt, seenAt,
lastSyncAt: seenAt, lastSyncAt: seenAt,
}; };
@@ -512,8 +667,9 @@ const processBookmark = async (watcher: Watcher, bookmark: BookmarkRecord) => {
const torrentPath = await runTorrentDownload( const torrentPath = await runTorrentDownload(
tracker.cliSiteKey, tracker.cliSiteKey,
cookie, cookie,
bookmark.pageURL, bookmark,
runPaths.torrentDir runPaths.torrentDir,
watcher.wishlistUrl
); );
item = { item = {
...item, ...item,
@@ -543,6 +699,22 @@ const processBookmark = async (watcher: Watcher, bookmark: BookmarkRecord) => {
leechers: qbitResult.torrent.num_leechs, leechers: qbitResult.torrent.num_leechs,
lastSyncAt: nowIso(), lastSyncAt: nowIso(),
}; };
if (tracker.supportsRemoveBookmark) {
try {
await runBookmarkRemove(
tracker.cliSiteKey,
cookie,
bookmark,
watcher.wishlistUrl
);
} catch (error) {
item = {
...item,
errorMessage: error instanceof Error ? error.message : "Bookmark silinemedi",
lastSyncAt: nowIso(),
};
}
}
return item; return item;
} finally { } finally {
await cleanupRunPaths(runPaths); await cleanupRunPaths(runPaths);
@@ -592,13 +764,18 @@ export const runWatcherById = async (watcherId: string) => {
const cookie = decryptWatcherCookie(watcher.cookieEncrypted); const cookie = decryptWatcherCookie(watcher.cookieEncrypted);
const records = await runBookmarkFetch( const records = await runBookmarkFetch(
tracker.cliSiteKey, tracker.cliSiteKey,
cookie cookie,
watcher.wishlistUrl
); );
logger.info({ watcherId, count: records.length }, "Watcher bookmarks fetched"); logger.info({ watcherId, count: records.length }, "Watcher bookmarks fetched");
newBookmarks = records.length; newBookmarks = records.length;
const freshDb = await readDb(); const torrents = await getQbitClient().getTorrentsInfo().catch(() => []);
const items = freshDb.watcherItems ?? []; const items = await pruneWatcherItems(watcher.id, records, torrents);
const processedSourceKeys = new Set(items.map((item) => item.sourceKey)); const processedSourceKeys = new Set(
items
.filter((item) => item.watcherId === watcher.id)
.map((item) => item.sourceKey)
);
for (const bookmark of records) { for (const bookmark of records) {
if (processedSourceKeys.has(bookmark.pageURL)) { if (processedSourceKeys.has(bookmark.pageURL)) {
logger.info({ watcherId, sourceKey: bookmark.pageURL }, "Watcher bookmark skipped due to dedupe set"); logger.info({ watcherId, sourceKey: bookmark.pageURL }, "Watcher bookmark skipped due to dedupe set");
@@ -606,6 +783,13 @@ export const runWatcherById = async (watcherId: string) => {
} }
const existing = findExistingItem(items, watcher.id, bookmark.pageURL); const existing = findExistingItem(items, watcher.id, bookmark.pageURL);
if (existing) { if (existing) {
existing.title = bookmark.title;
existing.imageUrl = normalizeImageUrl(bookmark.pageURL, bookmark.backgroundImage) ?? existing.imageUrl;
existing.downloadUrl = bookmark.downloadURL ?? existing.downloadUrl;
existing.removeToken = bookmark.removeToken ?? existing.removeToken;
existing.sizeBytes = parseSizeText(bookmark.size) ?? existing.sizeBytes;
existing.seeders = bookmark.seeders ?? existing.seeders;
existing.leechers = bookmark.leechers ?? existing.leechers;
existing.lastSyncAt = nowIso(); existing.lastSyncAt = nowIso();
await persistWatcherProgress({ await persistWatcherProgress({
watcherId: watcher.id, watcherId: watcher.id,
@@ -653,9 +837,14 @@ export const runWatcherById = async (watcherId: string) => {
pageUrl: bookmark.pageURL, pageUrl: bookmark.pageURL,
title: bookmark.title, title: bookmark.title,
imageUrl: normalizeImageUrl(bookmark.pageURL, bookmark.backgroundImage), imageUrl: normalizeImageUrl(bookmark.pageURL, bookmark.backgroundImage),
downloadUrl: bookmark.downloadURL ?? undefined,
removeToken: bookmark.removeToken ?? undefined,
status: "failed", status: "failed",
statusLabel: statusLabel("failed"), statusLabel: statusLabel("failed"),
trackerTorrentId: deriveTrackerTorrentId(bookmark.pageURL), trackerTorrentId: deriveTrackerTorrentId(bookmark.pageURL),
sizeBytes: parseSizeText(bookmark.size),
seeders: bookmark.seeders ?? undefined,
leechers: bookmark.leechers ?? undefined,
seenAt: nowIso(), seenAt: nowIso(),
lastSyncAt: nowIso(), lastSyncAt: nowIso(),
errorMessage: error instanceof Error ? error.message : "Unknown watcher error", errorMessage: error instanceof Error ? error.message : "Unknown watcher error",

View File

@@ -12,6 +12,11 @@ export interface BookmarkRecord {
isVR?: boolean; isVR?: boolean;
title: string; title: string;
backgroundImage?: string | null; backgroundImage?: string | null;
downloadURL?: string | null;
removeToken?: string | null;
size?: string | null;
seeders?: number | null;
leechers?: number | null;
} }
export interface ScraperRunPaths { export interface ScraperRunPaths {

View File

@@ -104,7 +104,9 @@ export const WatcherPage = () => {
const pushAlert = useUiStore((s) => s.pushAlert); const pushAlert = useUiStore((s) => s.pushAlert);
const [selectedWatcherId, setSelectedWatcherId] = useState<string | null>(null); const [selectedWatcherId, setSelectedWatcherId] = useState<string | null>(null);
const [isCreatingNew, setIsCreatingNew] = useState(false);
const [tracker, setTracker] = useState("happyfappy"); const [tracker, setTracker] = useState("happyfappy");
const [wishlistUrl, setWishlistUrl] = useState("");
const [cookie, setCookie] = useState(""); const [cookie, setCookie] = useState("");
const [category, setCategory] = useState("__none__"); const [category, setCategory] = useState("__none__");
const [showCookie, setShowCookie] = useState(false); const [showCookie, setShowCookie] = useState(false);
@@ -152,7 +154,7 @@ export const WatcherPage = () => {
} }
if (watchersLoaded) { if (watchersLoaded) {
setWatchers(watchersLoaded); setWatchers(watchersLoaded);
if (!selectedWatcherId && watchersLoaded[0]?.id) { if (!selectedWatcherId && !isCreatingNew && watchersLoaded[0]?.id) {
setSelectedWatcherId(watchersLoaded[0].id); setSelectedWatcherId(watchersLoaded[0].id);
} }
} }
@@ -168,13 +170,15 @@ export const WatcherPage = () => {
load(); load();
const interval = setInterval(load, 15000); const interval = setInterval(load, 15000);
return () => clearInterval(interval); return () => clearInterval(interval);
}, []); }, [selectedWatcherId, isCreatingNew]);
useEffect(() => { useEffect(() => {
if (!selectedWatcher) { if (!selectedWatcher) {
return; return;
} }
setIsCreatingNew(false);
setTracker(selectedWatcher.tracker); setTracker(selectedWatcher.tracker);
setWishlistUrl(selectedWatcher.wishlistUrl ?? "");
setCategory(selectedWatcher.category ?? "__none__"); setCategory(selectedWatcher.category ?? "__none__");
setCookie(""); setCookie("");
setShowCookie(false); setShowCookie(false);
@@ -194,8 +198,10 @@ export const WatcherPage = () => {
}, [intervalUnit, intervalValue]); }, [intervalUnit, intervalValue]);
const resetForm = () => { const resetForm = () => {
setIsCreatingNew(true);
setSelectedWatcherId(null); setSelectedWatcherId(null);
setTracker(watcherTrackers[0]?.key ?? "happyfappy"); setTracker(watcherTrackers[0]?.key ?? "happyfappy");
setWishlistUrl("");
setCategory("__none__"); setCategory("__none__");
setCookie(""); setCookie("");
setShowCookie(false); setShowCookie(false);
@@ -204,6 +210,8 @@ export const WatcherPage = () => {
setEnabled(true); setEnabled(true);
}; };
const requiresWishlistUrl = tracker === "privatehd";
const submit = async () => { const submit = async () => {
if (!cookie.trim() && !selectedWatcher) { if (!cookie.trim() && !selectedWatcher) {
pushAlert({ pushAlert({
@@ -213,12 +221,21 @@ export const WatcherPage = () => {
}); });
return; return;
} }
if (requiresWishlistUrl && !wishlistUrl.trim()) {
pushAlert({
title: "Wishlist URL gerekli",
description: "PrivateHD watcher icin wishlist URL girin.",
variant: "warn",
});
return;
}
setSaving(true); setSaving(true);
try { try {
if (selectedWatcher) { if (selectedWatcher) {
await api.patch(`/api/watchers/${selectedWatcher.id}`, { await api.patch(`/api/watchers/${selectedWatcher.id}`, {
tracker, tracker,
category: category === "__none__" ? "" : category, category: category === "__none__" ? "" : category,
wishlistUrl,
intervalMinutes, intervalMinutes,
enabled, enabled,
...(cookie.trim() ? { cookie } : {}), ...(cookie.trim() ? { cookie } : {}),
@@ -232,11 +249,13 @@ export const WatcherPage = () => {
const response = await api.post("/api/watchers", { const response = await api.post("/api/watchers", {
tracker, tracker,
category: category === "__none__" ? "" : category, category: category === "__none__" ? "" : category,
wishlistUrl,
cookie, cookie,
intervalMinutes, intervalMinutes,
enabled, enabled,
}); });
setWatchers([response.data, ...watchers]); setWatchers([response.data, ...watchers]);
setIsCreatingNew(false);
setSelectedWatcherId(response.data.id); setSelectedWatcherId(response.data.id);
pushAlert({ pushAlert({
title: "Watcher eklendi", title: "Watcher eklendi",
@@ -499,7 +518,16 @@ export const WatcherPage = () => {
<span className="text-xs font-semibold uppercase tracking-[0.18em] text-slate-500"> <span className="text-xs font-semibold uppercase tracking-[0.18em] text-slate-500">
Tracker Tracker
</span> </span>
<Select value={tracker} onValueChange={setTracker} disabled={Boolean(selectedWatcher)}> <Select
value={tracker}
onValueChange={(value) => {
setTracker(value);
if (!selectedWatcher) {
setIsCreatingNew(true);
}
}}
disabled={Boolean(selectedWatcher)}
>
<SelectTrigger className="w-full rounded-xl"> <SelectTrigger className="w-full rounded-xl">
<SelectValue placeholder="Tracker sec" /> <SelectValue placeholder="Tracker sec" />
</SelectTrigger> </SelectTrigger>
@@ -513,6 +541,28 @@ export const WatcherPage = () => {
</Select> </Select>
</label> </label>
<label className="block space-y-2">
<span className="text-xs font-semibold uppercase tracking-[0.18em] text-slate-500">
Wishlist URL
</span>
<Input
type="text"
value={wishlistUrl}
onChange={(event) => setWishlistUrl(event.target.value)}
placeholder={
requiresWishlistUrl
? "https://privatehd.to/profile/kullanici/wishlist"
: "Bu tracker icin bos birakilabilir"
}
className="rounded-xl"
/>
<div className="text-xs text-slate-500">
{requiresWishlistUrl
? "PrivateHD icin bookmark kaynaginin tam URL'si zorunlu."
: "HappyFappy gibi tracker'larda bu alan opsiyoneldir."}
</div>
</label>
<label className="block space-y-2"> <label className="block space-y-2">
<div className="flex items-center justify-between"> <div className="flex items-center justify-between">
<span className="text-xs font-semibold uppercase tracking-[0.18em] text-slate-500"> <span className="text-xs font-semibold uppercase tracking-[0.18em] text-slate-500">

View File

@@ -91,6 +91,7 @@ export interface Watcher {
id: string; id: string;
tracker: string; tracker: string;
trackerLabel: string; trackerLabel: string;
wishlistUrl?: string;
category?: string; category?: string;
cookieHint: string; cookieHint: string;
hasCookie: boolean; hasCookie: boolean;
@@ -115,6 +116,8 @@ export interface WatcherItem {
pageUrl: string; pageUrl: string;
title: string; title: string;
imageUrl?: string; imageUrl?: string;
downloadUrl?: string;
removeToken?: string;
status: string; status: string;
statusLabel: string; statusLabel: string;
qbitHash?: string; qbitHash?: string;

View File

@@ -1,11 +1,9 @@
from __future__ import annotations from __future__ import annotations
import argparse
import base64 import base64
import json import json
import os import os
import sys import sys
import tempfile
from http import HTTPStatus from http import HTTPStatus
from http.server import BaseHTTPRequestHandler, ThreadingHTTPServer from http.server import BaseHTTPRequestHandler, ThreadingHTTPServer
from pathlib import Path from pathlib import Path
@@ -16,7 +14,7 @@ WSCRAPER_SRC = REPO_ROOT / "bin" / "wscraper" / "src"
if str(WSCRAPER_SRC) not in sys.path: if str(WSCRAPER_SRC) not in sys.path:
sys.path.insert(0, str(WSCRAPER_SRC)) sys.path.insert(0, str(WSCRAPER_SRC))
from wscraper.sites.happyfappy import run_download_torrent_files, run_get_bookmarks from wscraper.registry import get_tracker, list_trackers, normalize_tracker
HOST = os.environ.get("WSCRAPER_SERVICE_HOST", "0.0.0.0") HOST = os.environ.get("WSCRAPER_SERVICE_HOST", "0.0.0.0")
PORT = int(os.environ.get("WSCRAPER_SERVICE_PORT", "8787")) PORT = int(os.environ.get("WSCRAPER_SERVICE_PORT", "8787"))
@@ -48,15 +46,24 @@ def require_auth(handler: BaseHTTPRequestHandler) -> bool:
return False return False
def normalize_tracker(payload: dict) -> str: def normalize_payload(payload: dict) -> tuple[str, str, dict, str | None]:
tracker = str(payload.get("tracker", "")).strip().lower() tracker_key = normalize_tracker(str(payload.get("tracker", "")))
if tracker not in {"happyfappy", "hf"}: cookie = str(payload.get("cookie", "")).strip()
raise ValueError("Unsupported tracker") if not cookie:
return "happyfappy" raise ValueError("Cookie is required")
item = payload.get("item")
if item is None:
item = {}
if not isinstance(item, dict):
raise ValueError("Item payload must be an object")
wishlist_url = payload.get("wishlistUrl")
if wishlist_url is not None:
wishlist_url = str(wishlist_url).strip() or None
return tracker_key, cookie, item, wishlist_url
class Handler(BaseHTTPRequestHandler): class Handler(BaseHTTPRequestHandler):
server_version = "wscraper-service/1.0" server_version = "wscraper-service/2.0"
def do_GET(self) -> None: # noqa: N802 def do_GET(self) -> None: # noqa: N802
parsed = urlparse(self.path) parsed = urlparse(self.path)
@@ -71,7 +78,7 @@ class Handler(BaseHTTPRequestHandler):
json_response( json_response(
self, self,
HTTPStatus.OK, HTTPStatus.OK,
{"items": [{"key": "happyfappy", "label": "HappyFappy"}]}, {"items": [{"key": tracker.key, "label": tracker.label} for tracker in list_trackers()]},
) )
return return
json_response(self, HTTPStatus.NOT_FOUND, {"error": "Not found"}) json_response(self, HTTPStatus.NOT_FOUND, {"error": "Not found"})
@@ -83,76 +90,35 @@ class Handler(BaseHTTPRequestHandler):
parsed = urlparse(self.path) parsed = urlparse(self.path)
try: try:
payload = parse_json_body(self) payload = parse_json_body(self)
tracker_key, cookie, item, wishlist_url = normalize_payload(payload)
tracker = get_tracker(tracker_key)
if parsed.path == "/bookmarks": if parsed.path == "/bookmarks":
tracker = normalize_tracker(payload) items = tracker.get_bookmarks(cookie, wishlist_url=wishlist_url)
cookie = str(payload.get("cookie", "")).strip() json_response(self, HTTPStatus.OK, {"tracker": tracker_key, "items": items})
if not cookie:
raise ValueError("Cookie is required")
with tempfile.TemporaryDirectory(prefix="wscraper-bookmarks-") as tmpdir:
output_path = Path(tmpdir) / "bookmarks.json"
run_get_bookmarks(
argparse.Namespace(
base_url="https://www.happyfappy.net",
cookie=cookie,
cookie_file=None,
output=str(output_path),
delay_min=1.8,
delay_max=3.2,
retries=3,
backoff_base=5.0,
max_pages=200,
)
)
items = json.loads(output_path.read_text(encoding="utf-8"))
json_response(self, HTTPStatus.OK, {"tracker": tracker, "items": items})
return return
if parsed.path == "/download": if parsed.path == "/download":
tracker = normalize_tracker(payload) result = tracker.download_torrent(cookie, item, wishlist_url=wishlist_url)
cookie = str(payload.get("cookie", "")).strip()
detail_url = str(payload.get("url", "")).strip()
remove_bookmark = bool(payload.get("removeBookmark", True))
if not cookie:
raise ValueError("Cookie is required")
if not detail_url:
raise ValueError("Detail url is required")
with tempfile.TemporaryDirectory(prefix="wscraper-download-") as tmpdir:
output_dir = Path(tmpdir) / "torrent"
run_download_torrent_files(
argparse.Namespace(
url=detail_url,
base_url="https://www.happyfappy.net",
cookie=cookie,
cookie_file=None,
output_dir=str(output_dir),
rm_bookmark=remove_bookmark,
retries=3,
backoff_base=5.0,
)
)
files = sorted(output_dir.glob("*.torrent"))
if not files:
raise RuntimeError("No torrent file produced")
torrent_path = files[0]
content = base64.b64encode(torrent_path.read_bytes()).decode("ascii")
json_response( json_response(
self, self,
HTTPStatus.OK, HTTPStatus.OK,
{ {
"tracker": tracker, "tracker": tracker_key,
"filename": torrent_path.name, "filename": result["filename"],
"contentBase64": content, "contentBase64": base64.b64encode(result["data"]).decode("ascii"),
}, },
) )
return return
if parsed.path == "/remove-bookmark":
tracker.remove_bookmark(cookie, item, wishlist_url=wishlist_url)
json_response(self, HTTPStatus.OK, {"tracker": tracker_key, "ok": True})
return
json_response(self, HTTPStatus.NOT_FOUND, {"error": "Not found"}) json_response(self, HTTPStatus.NOT_FOUND, {"error": "Not found"})
except Exception as error: # noqa: BLE001 except Exception as error: # noqa: BLE001
json_response( json_response(self, HTTPStatus.BAD_REQUEST, {"error": str(error)})
self,
HTTPStatus.BAD_REQUEST,
{"error": str(error)},
)
def log_message(self, fmt: str, *args) -> None: def log_message(self, fmt: str, *args) -> None:
print(f"[wscraper-service] {self.address_string()} - {fmt % args}") print(f"[wscraper-service] {self.address_string()} - {fmt % args}")