first commit

2026-01-02 15:49:01 +03:00
commit 4348f76a7c
80 changed files with 10133 additions and 0 deletions

apps/server/src/auth/auth.middleware.ts Normal file

@@ -0,0 +1,22 @@
import { Request, Response, NextFunction } from "express";
import { verifyToken } from "./auth.service";
export const requireAuth = (req: Request, res: Response, next: NextFunction) => {
const token = req.cookies?.["qbuffer_token"];
if (!token) {
return res.status(401).json({ error: "Unauthorized" });
}
try {
const payload = verifyToken(token);
req.user = payload;
return next();
} catch (error) {
return res.status(401).json({ error: "Unauthorized" });
}
};
declare module "express-serve-static-core" {
interface Request {
user?: { username: string };
}
}

apps/server/src/auth/auth.routes.ts Normal file

@@ -0,0 +1,64 @@
import { Router } from "express";
import rateLimit from "express-rate-limit";
import { signToken, verifyCredentials, verifyToken } from "./auth.service";
import { isDev } from "../config";
const router = Router();
const loginLimiter = rateLimit({
windowMs: 60_000,
max: 5,
standardHeaders: true,
legacyHeaders: false,
});
router.post("/login", loginLimiter, async (req, res) => {
const { username, password } = req.body ?? {};
if (!username || !password) {
return res.status(400).json({ error: "Missing credentials" });
}
const user = await verifyCredentials(username, password);
if (!user) {
return res.status(401).json({ error: "Invalid credentials" });
}
const token = signToken({ username: user.username });
res.cookie("qbuffer_token", token, {
httpOnly: true,
sameSite: "lax",
secure: !isDev,
});
return res.json({ username: user.username });
});
router.post("/logout", (_req, res) => {
res.clearCookie("qbuffer_token");
return res.json({ ok: true });
});
router.get("/me", (req, res) => {
const token = req.cookies?.["qbuffer_token"];
if (!token) {
return res.status(401).json({ error: "Unauthorized" });
}
try {
const payload = verifyToken(token);
return res.json({ ok: true, username: payload.username });
} catch (error) {
return res.status(401).json({ error: "Unauthorized" });
}
});
router.get("/socket-token", (req, res) => {
const token = req.cookies?.["qbuffer_token"];
if (!token) {
return res.status(401).json({ error: "Unauthorized" });
}
try {
verifyToken(token);
return res.json({ token });
} catch (error) {
return res.status(401).json({ error: "Unauthorized" });
}
});
export default router;

apps/server/src/auth/auth.service.ts Normal file

@@ -0,0 +1,48 @@
import bcrypt from "bcryptjs";
import jwt from "jsonwebtoken";
import { config } from "../config";
import { readDb, writeDb } from "../storage/jsondb";
import { nowIso } from "../utils/time";
import { User } from "../types";
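// Seed the single APP_USERNAME/APP_PASSWORD account on first boot if it does not exist yet.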
const ensureSeedUser = async (): Promise<User | null> => {
if (!config.appUsername || !config.appPassword) {
return null;
}
const db = await readDb();
const existing = db.users.find((user) => user.username === config.appUsername);
if (existing) {
return existing;
}
const passwordHash = await bcrypt.hash(config.appPassword, 10);
const newUser: User = {
username: config.appUsername,
passwordHash,
createdAt: nowIso(),
};
db.users.push(newUser);
await writeDb(db);
return newUser;
};
export const initAuth = async () => {
await ensureSeedUser();
};
export const verifyCredentials = async (username: string, password: string) => {
const db = await readDb();
const user = db.users.find((u) => u.username === username);
if (!user) {
return null;
}
const match = await bcrypt.compare(password, user.passwordHash);
return match ? user : null;
};
export const signToken = (payload: { username: string }) => {
return jwt.sign(payload, config.jwtSecret, { expiresIn: "7d" });
};
export const verifyToken = (token: string) => {
return jwt.verify(token, config.jwtSecret) as { username: string };
};

apps/server/src/config.ts Normal file

@@ -0,0 +1,32 @@
import path from "node:path";
const envNumber = (value: string | undefined, fallback: number) => {
const parsed = Number(value);
return Number.isFinite(parsed) ? parsed : fallback;
};
export const config = {
port: envNumber(process.env.SERVER_PORT, 3001),
nodeEnv: process.env.NODE_ENV ?? "development",
qbitBaseUrl: process.env.QBIT_BASE_URL ?? "",
qbitUsername: process.env.QBIT_USERNAME ?? "",
qbitPassword: process.env.QBIT_PASSWORD ?? "",
appUsername: process.env.APP_USERNAME ?? "",
appPassword: process.env.APP_PASSWORD ?? "",
jwtSecret: process.env.JWT_SECRET ?? "",
pollIntervalMs: envNumber(process.env.POLL_INTERVAL_MS, 3000),
enforceIntervalMs: envNumber(process.env.ENFORCE_INTERVAL_MS, 2000),
defaultDelayMs: envNumber(process.env.DEFAULT_DELAY_MS, 3000),
maxLoopLimit: envNumber(process.env.MAX_LOOP_LIMIT, 20),
stalledRecoveryMs: envNumber(process.env.STALLED_RECOVERY_MS, 300_000),
timerPollMs: envNumber(process.env.TIMER_POLL_MS, 60_000),
webPort: envNumber(process.env.WEB_PORT, 5173),
webOrigin: process.env.WEB_ORIGIN ?? "",
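// The paths below assume the container filesystem layout and are not configurable via environment variables.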
dataDir: "/app/data",
dbPath: "/app/data/db.json",
logsPath: "/app/data/logs.json",
torrentArchiveDir: "/app/data/torrents",
webPublicDir: path.resolve("/app/apps/server/public"),
};
export const isDev = config.nodeEnv !== "production";


@@ -0,0 +1,5 @@
export interface EnforcementResult {
jobId: string;
bannedIps: string[];
allowIpConnected: boolean;
}

apps/server/src/enforcement/enforcement.worker.ts Normal file

@@ -0,0 +1,122 @@
import axios from "axios";
import { getQbitCapabilities, getQbitClient } from "../qbit/qbit.context";
import { readDb, writeDb } from "../storage/jsondb";
import { nowIso } from "../utils/time";
import { emitJobLog, emitJobMetrics } from "../realtime/emitter";
import { appendAuditLog } from "../utils/logger";
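// jobId -> timestamp of the last "unsupported" warning, so repeated ticks do not spam the job log.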
const peerErrorThrottle = new Map<string, number>();
export const startEnforcementWorker = (intervalMs: number) => {
setInterval(async () => {
let db;
try {
const qbit = getQbitClient();
const caps = getQbitCapabilities();
db = await readDb();
for (const job of db.loopJobs) {
try {
if (job.status !== "RUNNING") {
continue;
}
if (!caps) {
continue;
}
if (!caps.hasPeersEndpoint) {
const lastWarn = peerErrorThrottle.get(job.id) ?? 0;
if (Date.now() - lastWarn > 60_000) {
emitJobLog({
jobId: job.id,
level: "WARN",
message: "Peer listing unsupported; enforcement disabled",
createdAt: nowIso(),
});
peerErrorThrottle.set(job.id, Date.now());
}
continue;
}
let peersResponse;
try {
peersResponse = await qbit.getTorrentPeers(job.torrentHash);
} catch (error) {
const status = axios.isAxiosError(error) ? error.response?.status : undefined;
if (status === 404) {
const lastWarn = peerErrorThrottle.get(job.id) ?? 0;
if (Date.now() - lastWarn > 60_000) {
emitJobLog({
jobId: job.id,
level: "WARN",
message: "Peer listesi desteklenmiyor; enforcement devre dışı.",
createdAt: nowIso(),
});
peerErrorThrottle.set(job.id, Date.now());
}
continue;
}
throw error;
}
const peers = Object.values(peersResponse.peers || {});
let allowIpConnected = false;
const banned: string[] = [];
for (const peer of peers) {
if (peer.ip === job.allowIp) {
allowIpConnected = true;
continue;
}
if (caps?.hasBanEndpoint) {
const peerKey = `${peer.ip}:${peer.port}`;
banned.push(peerKey);
}
}
if (banned.length > 0 && caps?.hasBanEndpoint) {
await qbit.banPeers(banned);
// Strip the trailing ":port"; lastIndexOf keeps IPv6 addresses (which contain ":") intact.
job.bans.bannedIps.push(...banned.map((peer) => peer.slice(0, peer.lastIndexOf(":"))));
job.bans.lastBanAt = nowIso();
emitJobLog({
jobId: job.id,
level: "WARN",
message: `Banned ${banned.length} peers`,
createdAt: nowIso(),
});
await appendAuditLog({
level: "WARN",
event: "PEER_BANNED",
message: `Job ${job.id}: banned ${banned.length} peers`,
});
}
if (!caps?.hasBanEndpoint) {
emitJobLog({
jobId: job.id,
level: "WARN",
message: "Peer ban unsupported; warn-only enforcement",
createdAt: nowIso(),
});
}
if (!allowIpConnected) {
emitJobLog({
jobId: job.id,
level: "WARN",
message: "Allowed IP not connected",
createdAt: nowIso(),
});
}
job.updatedAt = nowIso();
emitJobMetrics(job);
} catch (error) {
emitJobLog({
jobId: job.id,
level: "ERROR",
message: "Enforcement error; continuing.",
createdAt: nowIso(),
});
}
}
await writeDb(db);
} catch (error) {
// Keep worker alive on errors.
}
}, intervalMs);
};

apps/server/src/index.ts Normal file

@@ -0,0 +1,112 @@
import express from "express";
import http from "node:http";
import path from "node:path";
import cookieParser from "cookie-parser";
import cors from "cors";
import { config, isDev } from "./config";
import { ensureDataPaths } from "./storage/paths";
import { initAuth } from "./auth/auth.service";
import authRoutes from "./auth/auth.routes";
import { requireAuth } from "./auth/auth.middleware";
import qbitRoutes from "./qbit/qbit.routes";
import torrentRoutes from "./torrent/torrent.routes";
import loopRoutes from "./loop/loop.routes";
import profilesRoutes from "./loop/profiles.routes";
import statusRoutes from "./status/status.routes";
import timerRoutes from "./timer/timer.routes";
import { QbitClient } from "./qbit/qbit.client";
import { detectCapabilities } from "./qbit/qbit.capabilities";
import { setQbitClient, setQbitCapabilities } from "./qbit/qbit.context";
import { setQbitStatus } from "./status/status.service";
import { createSocketServer } from "./realtime/socket";
import { initEmitter, emitQbitHealth } from "./realtime/emitter";
import { startLoopScheduler } from "./loop/loop.scheduler";
import { startEnforcementWorker } from "./enforcement/enforcement.worker";
import { startTimerWorker } from "./timer/timer.worker";
import { logger } from "./utils/logger";
process.on("unhandledRejection", (reason) => {
logger.error({ reason }, "Unhandled promise rejection");
});
process.on("uncaughtException", (error) => {
logger.error({ error }, "Uncaught exception");
});
let serverStarted = false;
const bootstrap = async () => {
await ensureDataPaths();
await initAuth();
const app = express();
app.use(cookieParser());
app.use(express.json());
if (isDev) {
const fallbackOrigin = `http://localhost:${config.webPort}`;
const origins = [config.webOrigin || fallbackOrigin, fallbackOrigin];
app.use(
cors({
origin: origins,
credentials: true,
})
);
}
app.use("/api/auth", authRoutes);
app.use("/api/qbit", requireAuth, qbitRoutes);
app.use("/api/torrent", requireAuth, torrentRoutes);
app.use("/api/loop", requireAuth, loopRoutes);
app.use("/api/profiles", requireAuth, profilesRoutes);
app.use("/api/status", requireAuth, statusRoutes);
app.use("/api/timer", requireAuth, timerRoutes);
if (!isDev) {
app.use(express.static(config.webPublicDir));
app.get("*", (req, res, next) => {
if (req.path.startsWith("/api")) {
return next();
}
return res.sendFile(path.join(config.webPublicDir, "index.html"));
});
}
const server = http.createServer(app);
const io = createSocketServer(server);
initEmitter(io);
const qbit = new QbitClient();
setQbitClient(qbit);
try {
const caps = await detectCapabilities(qbit);
setQbitCapabilities(caps);
setQbitStatus({ ok: true, version: caps.version, capabilities: caps });
emitQbitHealth({ ok: true, version: caps.version, capabilities: caps });
} catch (error) {
logger.error({ error }, "Failed to connect to qBittorrent");
setQbitStatus({ ok: false, lastError: (error as Error).message });
emitQbitHealth({ ok: false, lastError: (error as Error).message });
}
startLoopScheduler(qbit, config.pollIntervalMs);
startEnforcementWorker(config.enforceIntervalMs);
startTimerWorker(qbit, config.timerPollMs);
server.listen(config.port, () => {
serverStarted = true;
logger.info(`q-buffer server listening on ${config.port}`);
});
};
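// Retry bootstrap every 5s until the HTTP server has started once; failures after that are only logged.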
const startWithRetry = () => {
bootstrap().catch((error) => {
logger.error({ error }, "Failed to start server");
if (!serverStarted) {
setTimeout(startWithRetry, 5000);
}
});
};
startWithRetry();

apps/server/src/loop/loop.engine.ts Normal file

@@ -0,0 +1,219 @@
import { randomUUID } from "node:crypto";
import { QbitClient } from "../qbit/qbit.client";
import { readDb, writeDb } from "../storage/jsondb";
import { AuditLog, LoopJob } from "../types";
import { nowIso } from "../utils/time";
import { appendAuditLog, logger } from "../utils/logger";
import { emitJobLog, emitJobMetrics } from "../realtime/emitter";
import { config } from "../config";
const logJob = async (jobId: string, level: "INFO" | "WARN" | "ERROR", message: string, event?: AuditLog["event"]) => {
const createdAt = nowIso();
emitJobLog({ jobId, level, message, createdAt });
if (event) {
await appendAuditLog({ level, event, message });
}
};
export const createLoopJob = async (
input: {
torrentHash: string;
name: string;
sizeBytes: number;
magnet?: string;
torrentFilePath?: string;
allowIp: string;
targetLoops: number;
delayMs: number;
}
): Promise<LoopJob> => {
const now = nowIso();
const job: LoopJob = {
id: randomUUID(),
torrentHash: input.torrentHash,
name: input.name,
sizeBytes: input.sizeBytes,
magnet: input.magnet,
torrentFilePath: input.torrentFilePath,
allowIp: input.allowIp,
targetLoops: input.targetLoops,
doneLoops: 0,
delayMs: input.delayMs,
deleteDataBetweenLoops: true,
enforcementMode: "aggressive-soft",
status: "RUNNING",
currentRun: {
startedAt: now,
lastProgress: 0,
lastProgressAt: now,
downloadedThisRunBytes: 0,
avgSpeed: 0,
},
totals: {
totalDownloadedBytes: 0,
totalTimeMs: 0,
},
bans: {
bannedIps: [],
},
createdAt: now,
updatedAt: now,
};
const db = await readDb();
db.loopJobs.push(job);
await writeDb(db);
await logJob(job.id, "INFO", `Loop job started for ${job.name}`, "JOB_STARTED");
return job;
};
export const stopLoopJob = async (jobId: string) => {
const db = await readDb();
const job = db.loopJobs.find((j) => j.id === jobId);
if (!job) {
return null;
}
job.status = "STOPPED";
job.nextRunAt = undefined;
job.currentRun = undefined;
job.updatedAt = nowIso();
await writeDb(db);
await logJob(job.id, "WARN", "Loop job stopped by user");
return job;
};
export const updateJob = async (job: LoopJob) => {
const db = await readDb();
const index = db.loopJobs.findIndex((j) => j.id === job.id);
if (index === -1) {
return null;
}
db.loopJobs[index] = job;
await writeDb(db);
emitJobMetrics(job);
return job;
};
export const tickLoopJobs = async (
qbit: QbitClient,
torrents: { hash: string; progress: number; state: string; dlspeed: number }[]
) => {
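// Advances each job's state machine: RUNNING -> (stalled recovery | loop completed) -> WAITING_DELAY -> re-add torrent -> RUNNING, until targetLoops is reached.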
const db = await readDb();
let changed = false;
for (const job of db.loopJobs) {
if (job.status === "RUNNING") {
const torrent = torrents.find((t) => t.hash === job.torrentHash);
if (!torrent) {
try {
if (job.torrentFilePath) {
await qbit.addTorrentByFile(job.torrentFilePath);
} else if (job.magnet) {
await qbit.addTorrentByMagnet(job.magnet);
}
await logJob(job.id, "WARN", "Torrent missing, re-added", "JOB_RESTARTED");
} catch (error) {
job.status = "ERROR";
job.lastError = "Failed to re-add torrent";
await logJob(job.id, "ERROR", job.lastError);
}
job.updatedAt = nowIso();
changed = true;
continue;
}
job.currentRun = job.currentRun ?? {
startedAt: nowIso(),
lastProgress: 0,
lastProgressAt: nowIso(),
downloadedThisRunBytes: 0,
avgSpeed: 0,
};
if (torrent.progress > job.currentRun.lastProgress) {
job.currentRun.lastProgress = torrent.progress;
job.currentRun.lastProgressAt = nowIso();
job.currentRun.stalledSince = undefined;
}
job.currentRun.avgSpeed = torrent.dlspeed;
job.updatedAt = nowIso();
const stalledState = /stalled|meta/i.test(torrent.state);
if (stalledState) {
if (!job.currentRun.stalledSince) {
job.currentRun.stalledSince = nowIso();
}
const lastProgressAt = job.currentRun.lastProgressAt
? new Date(job.currentRun.lastProgressAt).getTime()
: 0;
if (Date.now() - lastProgressAt > config.stalledRecoveryMs) {
await logJob(
job.id,
"WARN",
"Stalled recovery: torrent will be removed and re-added"
);
try {
await qbit.deleteTorrent(job.torrentHash, true);
} catch (error) {
logger.error({ error }, "Failed to delete stalled torrent");
}
job.status = "WAITING_DELAY";
job.nextRunAt = new Date(Date.now() + job.delayMs).toISOString();
job.updatedAt = nowIso();
changed = true;
continue;
}
}
if (torrent.progress >= 1 && /UP|uploading|stalledUP|pausedUP|queuedUP/i.test(torrent.state)) {
job.doneLoops += 1;
job.totals.totalDownloadedBytes += job.sizeBytes;
await logJob(job.id, "INFO", `Loop ${job.doneLoops} completed`, "JOB_COMPLETED_LOOP");
try {
await qbit.deleteTorrent(job.torrentHash, true);
} catch (error) {
logger.error({ error }, "Failed to delete torrent");
}
if (job.doneLoops >= job.targetLoops) {
job.status = "COMPLETED";
await logJob(job.id, "INFO", "All loops completed", "JOB_COMPLETED_ALL");
} else {
job.status = "WAITING_DELAY";
job.nextRunAt = new Date(Date.now() + job.delayMs).toISOString();
}
}
changed = true;
}
if (job.status === "WAITING_DELAY") {
const nextRunAt = job.nextRunAt ? new Date(job.nextRunAt).getTime() : 0;
if (Date.now() >= nextRunAt) {
try {
if (job.torrentFilePath) {
await qbit.addTorrentByFile(job.torrentFilePath);
} else if (job.magnet) {
await qbit.addTorrentByMagnet(job.magnet);
}
job.status = "RUNNING";
job.currentRun = {
startedAt: nowIso(),
lastProgress: 0,
lastProgressAt: nowIso(),
downloadedThisRunBytes: 0,
avgSpeed: 0,
};
await logJob(job.id, "INFO", "Loop restarted", "JOB_RESTARTED");
} catch (error) {
job.status = "ERROR";
job.lastError = "Failed to re-add torrent after delay";
await logJob(job.id, "ERROR", job.lastError);
}
job.updatedAt = nowIso();
changed = true;
}
}
}
if (changed) {
await writeDb(db);
db.loopJobs.forEach((job) => emitJobMetrics(job));
}
};

apps/server/src/loop/loop.routes.ts Normal file

@@ -0,0 +1,151 @@
import { Router } from "express";
import fs from "node:fs/promises";
import path from "node:path";
import { getQbitCapabilities, getQbitClient } from "../qbit/qbit.context";
import { readDb } from "../storage/jsondb";
import { createLoopJob, stopLoopJob } from "./loop.engine";
import { dryRunSchema, loopStartSchema } from "../utils/validators";
import { getArchiveStatus, setArchiveStatus } from "../torrent/torrent.archive";
import { config } from "../config";
import { nowIso } from "../utils/time";
const router = Router();
router.post("/start", async (req, res) => {
const parsed = loopStartSchema.safeParse(req.body);
if (!parsed.success) {
return res.status(400).json({ error: parsed.error.flatten() });
}
const { hash, allowIp, targetLoops, delayMs } = parsed.data;
const db = await readDb();
if (targetLoops > db.settings.maxLoopLimit) {
return res.status(400).json({ error: "Target loops exceed max limit" });
}
const qbit = getQbitClient();
const torrents = await qbit.getTorrentsInfo();
const torrent = torrents.find((t) => t.hash === hash);
if (!torrent) {
return res.status(404).json({ error: "Torrent not found" });
}
let archive = await getArchiveStatus(hash);
if (!archive?.torrentFilePath) {
try {
const buffer = await qbit.exportTorrent(hash);
const targetPath = path.join(config.torrentArchiveDir, `${hash}.torrent`);
await fs.writeFile(targetPath, buffer);
archive = await setArchiveStatus({
hash,
status: "READY",
torrentFilePath: targetPath,
source: "exported",
updatedAt: nowIso(),
});
} catch (error) {
return res.status(400).json({
error: "Arşiv yok ve export başarısız. Lütfen Advanced bölümünden .torrent yükleyin.",
});
}
}
try {
await fs.access(archive.torrentFilePath);
} catch (error) {
return res.status(400).json({
error: "Arşiv dosyası bulunamadı. Lütfen tekrar yükleyin.",
});
}
const job = await createLoopJob({
torrentHash: hash,
name: torrent.name,
sizeBytes: torrent.size,
magnet: undefined,
torrentFilePath: archive?.torrentFilePath,
allowIp,
targetLoops,
delayMs,
});
res.json(job);
});
router.post("/stop/:jobId", async (req, res) => {
const { jobId } = req.params;
const job = await stopLoopJob(jobId);
if (!job) {
return res.status(404).json({ error: "Job not found" });
}
try {
const qbit = getQbitClient();
await qbit.deleteTorrent(job.torrentHash, true);
} catch (error) {
// Best-effort delete
}
res.json(job);
});
router.post("/stop-by-hash", async (req, res) => {
const { hash } = req.body ?? {};
if (!hash) {
return res.status(400).json({ error: "Missing hash" });
}
const db = await readDb();
const job = db.loopJobs.find((j) => j.torrentHash === hash);
if (!job) {
return res.status(404).json({ error: "Job not found" });
}
const stopped = await stopLoopJob(job.id);
try {
const qbit = getQbitClient();
await qbit.deleteTorrent(hash, true);
} catch (error) {
// Best-effort delete
}
res.json(stopped);
});
router.post("/dry-run", async (req, res) => {
const parsed = dryRunSchema.safeParse(req.body);
if (!parsed.success) {
return res.status(400).json({ error: parsed.error.flatten() });
}
const { hash } = parsed.data;
const qbit = getQbitClient();
const caps = getQbitCapabilities();
const torrents = await qbit.getTorrentsInfo();
const torrent = torrents.find((t) => t.hash === hash);
if (!torrent) {
return res.status(404).json({ error: "Torrent not found" });
}
const archive = await getArchiveStatus(hash);
res.json({
ok: true,
qbitVersion: caps?.version,
capabilities: caps,
hasMagnet: Boolean(torrent.magnet_uri),
archiveStatus: archive?.status ?? "MISSING",
warnings: [
caps?.hasPeersEndpoint ? null : "Peer listing unsupported",
caps?.hasBanEndpoint ? null : "Peer ban unsupported; warn-only enforcement",
torrent.magnet_uri ? null : "Magnet unavailable; upload .torrent recommended",
].filter(Boolean),
});
});
router.get("/jobs", async (_req, res) => {
const db = await readDb();
res.json(db.loopJobs);
});
router.get("/job/:jobId", async (req, res) => {
const db = await readDb();
const job = db.loopJobs.find((j) => j.id === req.params.jobId);
if (!job) {
return res.status(404).json({ error: "Job not found" });
}
res.json(job);
});
router.get("/logs/:jobId", async (req, res) => {
res.json({ jobId: req.params.jobId, logs: [] });
});
export default router;

apps/server/src/loop/loop.scheduler.ts Normal file

@@ -0,0 +1,26 @@
import { QbitClient } from "../qbit/qbit.client";
import { tickLoopJobs } from "./loop.engine";
import { getStatusSnapshot, refreshJobsStatus, setTorrentsStatus } from "../status/status.service";
import { emitStatusUpdate } from "../realtime/emitter";
import { logger } from "../utils/logger";
export const startLoopScheduler = (qbit: QbitClient, intervalMs: number) => {
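// One polling tick: snapshot torrents and transfer stats, advance loop jobs, then broadcast the combined status.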
setInterval(async () => {
try {
const torrents = await qbit.getTorrentsInfo();
const transfer = await qbit.getTransferInfo();
setTorrentsStatus(torrents, transfer);
await tickLoopJobs(qbit, torrents);
const jobs = await refreshJobsStatus();
const current = await getStatusSnapshot();
emitStatusUpdate({
qbit: { ...current.qbit, ok: true },
torrents,
transfer,
jobs,
});
} catch (error) {
logger.error({ error }, "Loop scheduler tick failed");
}
}, intervalMs);
};


@@ -0,0 +1,12 @@
import { LoopJob } from "../types";
export interface LoopStartInput {
hash: string;
allowIp: string;
targetLoops: number;
delayMs: number;
}
export interface LoopEngineContext {
jobs: LoopJob[];
}

apps/server/src/loop/profiles.routes.ts Normal file

@@ -0,0 +1,74 @@
import { Router } from "express";
import { randomUUID } from "node:crypto";
import { readDb, writeDb } from "../storage/jsondb";
import { profileSchema } from "../utils/validators";
import { nowIso } from "../utils/time";
const router = Router();
router.get("/", async (_req, res) => {
const db = await readDb();
res.json(db.profiles);
});
router.post("/", async (req, res) => {
const parsed = profileSchema.safeParse(req.body);
if (!parsed.success) {
return res.status(400).json({ error: parsed.error.flatten() });
}
const db = await readDb();
const profile = {
id: randomUUID(),
createdAt: nowIso(),
...parsed.data,
};
db.profiles.push(profile);
await writeDb(db);
res.json(profile);
});
router.put("/:profileId", async (req, res) => {
const parsed = profileSchema.safeParse(req.body);
if (!parsed.success) {
return res.status(400).json({ error: parsed.error.flatten() });
}
const db = await readDb();
const index = db.profiles.findIndex((p) => p.id === req.params.profileId);
if (index === -1) {
return res.status(404).json({ error: "Profile not found" });
}
db.profiles[index] = {
...db.profiles[index],
...parsed.data,
};
await writeDb(db);
res.json(db.profiles[index]);
});
router.delete("/:profileId", async (req, res) => {
const db = await readDb();
const next = db.profiles.filter((p) => p.id !== req.params.profileId);
if (next.length === db.profiles.length) {
return res.status(404).json({ error: "Profile not found" });
}
db.profiles = next;
await writeDb(db);
res.json({ ok: true });
});
router.post("/apply", async (req, res) => {
const { profileId, hash } = req.body ?? {};
const db = await readDb();
const profile = db.profiles.find((p) => p.id === profileId);
if (!profile) {
return res.status(404).json({ error: "Profile not found" });
}
res.json({
hash,
allowIp: profile.allowIp,
delayMs: profile.delayMs,
targetLoops: profile.targetLoops,
});
});
export default router;

apps/server/src/qbit/qbit.capabilities.ts Normal file

@@ -0,0 +1,30 @@
import { QbitClient } from "./qbit.client";
import { QbitCapabilities } from "./qbit.types";
export const detectCapabilities = async (
client: QbitClient
): Promise<QbitCapabilities> => {
const version = await client.getVersion();
let hasPeersEndpoint = true;
let hasBanEndpoint = true;
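// Probe with deliberately invalid input: a 400/404 means the endpoint exists and rejected the argument;
// any other failure marks the endpoint unavailable. Note the ban probe really bans 127.0.0.1:1 if it works.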
try {
await client.getTorrentPeers("__probe__");
} catch (error) {
const status = (error as any)?.response?.status;
if (status !== 400 && status !== 404) {
hasPeersEndpoint = false;
}
}
try {
await client.banPeers(["127.0.0.1:1"]);
} catch (error) {
const status = (error as any)?.response?.status;
if (status !== 400 && status !== 404) {
hasBanEndpoint = false;
}
}
return { version, hasPeersEndpoint, hasBanEndpoint };
};

apps/server/src/qbit/qbit.client.ts Normal file

@@ -0,0 +1,177 @@
import axios, { AxiosInstance } from "axios";
import { CookieJar } from "tough-cookie";
import { wrapper } from "axios-cookiejar-support";
import FormData from "form-data";
import fs from "node:fs";
import { config } from "../config";
import { logger } from "../utils/logger";
import {
QbitPeerList,
QbitTorrentInfo,
QbitTorrentProperties,
QbitTransferInfo,
} from "./qbit.types";
export class QbitClient {
private client: AxiosInstance;
private jar: CookieJar;
private loggedIn = false;
constructor() {
this.jar = new CookieJar();
this.client = wrapper(
axios.create({
baseURL: config.qbitBaseUrl,
jar: this.jar,
withCredentials: true,
})
);
}
async login(): Promise<void> {
if (!config.qbitBaseUrl) {
throw new Error("QBIT_BASE_URL missing");
}
const form = new URLSearchParams();
form.append("username", config.qbitUsername);
form.append("password", config.qbitPassword);
await this.client.post("/api/v2/auth/login", form, {
headers: { "Content-Type": "application/x-www-form-urlencoded" },
});
this.loggedIn = true;
}
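// Wraps every API call: logs in lazily on first use, and retries once after a 401/403 by re-authenticating (qBittorrent sessions expire).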
private async request<T>(fn: () => Promise<T>): Promise<T> {
try {
if (!this.loggedIn) {
await this.login();
}
return await fn();
} catch (error) {
if (
axios.isAxiosError(error) &&
(error.response?.status === 401 || error.response?.status === 403)
) {
logger.warn("qBittorrent session expired, re-login");
this.loggedIn = false;
await this.login();
return await fn();
}
throw error;
}
}
async getVersion(): Promise<string> {
const response = await this.request(() =>
this.client.get<string>("/api/v2/app/version")
);
return response.data;
}
async getTorrentsInfo(): Promise<QbitTorrentInfo[]> {
const response = await this.request(() =>
this.client.get<QbitTorrentInfo[]>("/api/v2/torrents/info", {
params: { filter: "all" },
})
);
return response.data;
}
async getTransferInfo(): Promise<QbitTransferInfo> {
const response = await this.request(() =>
this.client.get<QbitTransferInfo>("/api/v2/transfer/info")
);
return response.data;
}
async getTorrentProperties(hash: string): Promise<QbitTorrentProperties> {
const response = await this.request(() =>
this.client.get<QbitTorrentProperties>("/api/v2/torrents/properties", {
params: { hash },
})
);
return response.data;
}
async getTorrentPeers(hash: string): Promise<QbitPeerList> {
const response = await this.request(() =>
this.client.get<QbitPeerList>("/api/v2/sync/torrentPeers", {
params: { hash },
})
);
return response.data;
}
async exportTorrent(hash: string): Promise<Buffer> {
const response = await this.request(() =>
this.client.get<ArrayBuffer>("/api/v2/torrents/export", {
params: { hashes: hash },
responseType: "arraybuffer",
})
);
return Buffer.from(response.data);
}
async addTorrentByMagnet(magnet: string, options: Record<string, string> = {}) {
const form = new URLSearchParams();
form.append("urls", magnet);
Object.entries(options).forEach(([key, value]) => form.append(key, value));
await this.request(() =>
this.client.post("/api/v2/torrents/add", form, {
headers: { "Content-Type": "application/x-www-form-urlencoded" },
})
);
}
async addTorrentByFile(filePath: string, options: Record<string, string> = {}) {
const form = new FormData();
form.append("torrents", fs.createReadStream(filePath));
Object.entries(options).forEach(([key, value]) => form.append(key, value));
await this.request(() =>
this.client.post("/api/v2/torrents/add", form, {
headers: form.getHeaders(),
})
);
}
async deleteTorrent(hash: string, deleteFiles = true) {
const form = new URLSearchParams();
form.append("hashes", hash);
form.append("deleteFiles", deleteFiles ? "true" : "false");
await this.request(() =>
this.client.post("/api/v2/torrents/delete", form, {
headers: { "Content-Type": "application/x-www-form-urlencoded" },
})
);
}
async pauseTorrent(hash: string) {
const form = new URLSearchParams();
form.append("hashes", hash);
await this.request(() =>
this.client.post("/api/v2/torrents/pause", form, {
headers: { "Content-Type": "application/x-www-form-urlencoded" },
})
);
}
async resumeTorrent(hash: string) {
const form = new URLSearchParams();
form.append("hashes", hash);
await this.request(() =>
this.client.post("/api/v2/torrents/resume", form, {
headers: { "Content-Type": "application/x-www-form-urlencoded" },
})
);
}
async banPeers(peers: string[]) {
const form = new URLSearchParams();
form.append("peers", peers.join("|"));
await this.request(() =>
this.client.post("/api/v2/transfer/banPeers", form, {
headers: { "Content-Type": "application/x-www-form-urlencoded" },
})
);
}
}

apps/server/src/qbit/qbit.context.ts Normal file

@@ -0,0 +1,22 @@
import { QbitClient } from "./qbit.client";
import { QbitCapabilities } from "./qbit.types";
let client: QbitClient | null = null;
let capabilities: QbitCapabilities | null = null;
export const setQbitClient = (instance: QbitClient) => {
client = instance;
};
export const getQbitClient = () => {
if (!client) {
throw new Error("Qbit client not initialized");
}
return client;
};
export const setQbitCapabilities = (caps: QbitCapabilities) => {
capabilities = caps;
};
export const getQbitCapabilities = () => capabilities;

apps/server/src/qbit/qbit.routes.ts Normal file

@@ -0,0 +1,30 @@
import { Router } from "express";
import { getQbitClient } from "./qbit.context";
const router = Router();
router.get("/torrents", async (_req, res) => {
const qbit = getQbitClient();
const torrents = await qbit.getTorrentsInfo();
res.json(torrents);
});
router.get("/transfer", async (_req, res) => {
const qbit = getQbitClient();
const transfer = await qbit.getTransferInfo();
res.json(transfer);
});
router.get("/torrent/:hash", async (req, res) => {
const qbit = getQbitClient();
const props = await qbit.getTorrentProperties(req.params.hash);
res.json(props);
});
router.delete("/torrent/:hash", async (req, res) => {
const qbit = getQbitClient();
await qbit.deleteTorrent(req.params.hash, true);
res.json({ ok: true });
});
export default router;

apps/server/src/qbit/qbit.types.ts Normal file

@@ -0,0 +1,51 @@
export interface QbitTorrentInfo {
hash: string;
name: string;
size: number;
progress: number;
dlspeed: number;
state: string;
magnet_uri?: string;
completed?: number;
added_on?: number; // Unix epoch seconds; read by the timer worker.
tags?: string;
category?: string;
tracker?: string;
seeding_time?: number;
uploaded?: number;
}
export interface QbitTransferInfo {
dl_info_speed: number;
dl_info_data: number;
up_info_speed: number;
up_info_data: number;
connection_status: string;
}
export interface QbitTorrentProperties {
save_path?: string;
completion_on?: number;
comment?: string;
total_size?: number;
piece_size?: number;
}
export interface QbitPeerList {
peers: Record<
string,
{
ip: string;
port: number;
client: string;
progress: number;
dl_speed: number;
up_speed: number;
}
>;
}
export interface QbitCapabilities {
version: string;
hasPeersEndpoint: boolean;
hasBanEndpoint: boolean;
}

apps/server/src/realtime/emitter.ts Normal file

@@ -0,0 +1,43 @@
import { Server } from "socket.io";
import { EVENTS } from "./events";
import { StatusSnapshot } from "../status/status.service";
import { LoopJob, TimerLog, TimerSummary } from "../types";
let io: Server | null = null;
export const initEmitter = (server: Server) => {
io = server;
};
export const emitStatusSnapshot = (snapshot: StatusSnapshot) => {
io?.emit(EVENTS.STATUS_SNAPSHOT, snapshot);
};
export const emitStatusUpdate = (snapshot: StatusSnapshot) => {
io?.emit(EVENTS.STATUS_UPDATE, snapshot);
};
export const emitJobMetrics = (job: LoopJob) => {
io?.emit(EVENTS.JOB_METRICS, job);
};
export const emitJobLog = (payload: {
jobId: string;
level: "INFO" | "WARN" | "ERROR";
message: string;
createdAt: string;
}) => {
io?.emit(EVENTS.JOB_LOG, payload);
};
export const emitQbitHealth = (payload: StatusSnapshot["qbit"]) => {
io?.emit(EVENTS.QBIT_HEALTH, payload);
};
export const emitTimerLog = (payload: TimerLog) => {
io?.emit(EVENTS.TIMER_LOG, payload);
};
export const emitTimerSummary = (payload: TimerSummary) => {
io?.emit(EVENTS.TIMER_SUMMARY, payload);
};

apps/server/src/realtime/events.ts Normal file

@@ -0,0 +1,9 @@
export const EVENTS = {
STATUS_SNAPSHOT: "status:snapshot",
STATUS_UPDATE: "status:update",
JOB_METRICS: "job:metrics",
JOB_LOG: "job:log",
QBIT_HEALTH: "qbit:health",
TIMER_LOG: "timer:log",
TIMER_SUMMARY: "timer:summary",
};

apps/server/src/realtime/socket.ts Normal file

@@ -0,0 +1,59 @@
import { Server } from "socket.io";
import http from "node:http";
import { verifyToken } from "../auth/auth.service";
import { getStatusSnapshot } from "../status/status.service";
import { EVENTS } from "./events";
import { config, isDev } from "../config";
const parseCookies = (cookieHeader?: string) => {
const cookies: Record<string, string> = {};
if (!cookieHeader) {
return cookies;
}
cookieHeader.split(";").forEach((part) => {
const [key, ...rest] = part.trim().split("=");
cookies[key] = decodeURIComponent(rest.join("="));
});
return cookies;
};
export const createSocketServer = (server: http.Server) => {
const io = new Server(server, {
cors: isDev
? {
origin: true,
credentials: true,
}
: undefined,
});
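// Accept either an explicit handshake token (from /api/auth/socket-token) or the HTTP-only session cookie.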
io.use((socket, next) => {
const authToken = socket.handshake.auth?.token as string | undefined;
if (authToken) {
try {
verifyToken(authToken);
return next();
} catch (error) {
return next(new Error("Unauthorized"));
}
}
const cookies = parseCookies(socket.request.headers.cookie);
const token = cookies["qbuffer_token"];
if (!token) {
return next(new Error("Unauthorized"));
}
try {
verifyToken(token);
return next();
} catch (error) {
return next(new Error("Unauthorized"));
}
});
io.on("connection", async (socket) => {
const snapshot = await getStatusSnapshot();
socket.emit(EVENTS.STATUS_SNAPSHOT, snapshot);
});
return io;
};

apps/server/src/status/status.routes.ts Normal file

@@ -0,0 +1,11 @@
import { Router } from "express";
import { getStatusSnapshot } from "./status.service";
const router = Router();
router.get("/", async (_req, res) => {
const snapshot = await getStatusSnapshot();
res.json(snapshot);
});
export default router;

apps/server/src/status/status.service.ts Normal file

@@ -0,0 +1,45 @@
import { DbSchema, LoopJob } from "../types";
import { QbitCapabilities, QbitTorrentInfo, QbitTransferInfo } from "../qbit/qbit.types";
import { readDb } from "../storage/jsondb";
export interface StatusSnapshot {
qbit: {
ok: boolean;
version?: string;
capabilities?: QbitCapabilities;
lastError?: string;
};
torrents: QbitTorrentInfo[];
transfer?: QbitTransferInfo;
jobs: LoopJob[];
}
const snapshot: StatusSnapshot = {
qbit: { ok: false },
torrents: [],
transfer: undefined,
jobs: [],
};
export const setQbitStatus = (status: StatusSnapshot["qbit"]) => {
snapshot.qbit = status;
};
export const setTorrentsStatus = (
torrents: QbitTorrentInfo[],
transfer?: QbitTransferInfo
) => {
snapshot.torrents = torrents;
snapshot.transfer = transfer;
};
export const refreshJobsStatus = async () => {
const db: DbSchema = await readDb();
snapshot.jobs = db.loopJobs;
return snapshot.jobs;
};
export const getStatusSnapshot = async (): Promise<StatusSnapshot> => {
await refreshJobsStatus();
return snapshot;
};

apps/server/src/storage/jsondb.ts Normal file

@@ -0,0 +1,90 @@
import fs from "node:fs/promises";
import path from "node:path";
import { config } from "../config";
import { DbSchema } from "../types";
import { Mutex } from "./mutex";
const mutex = new Mutex();
const defaultDb = (): DbSchema => ({
users: [],
settings: {
pollIntervalMs: config.pollIntervalMs,
enforceIntervalMs: config.enforceIntervalMs,
defaultDelayMs: config.defaultDelayMs,
maxLoopLimit: config.maxLoopLimit,
},
loopJobs: [],
profiles: [],
auditLogs: [],
archives: {},
timerRules: [],
timerLogs: [],
timerSummary: {
totalDeleted: 0,
totalSeededSeconds: 0,
totalUploadedBytes: 0,
updatedAt: new Date().toISOString(),
},
});
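// Rotate up to four backup generations: db.json.bak0 (newest) through db.json.bak3 (oldest).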
const rotateBackups = async (dbPath: string) => {
const dir = path.dirname(dbPath);
const base = path.basename(dbPath);
for (let i = 2; i >= 0; i -= 1) {
const src = path.join(dir, `${base}.bak${i}`);
const dest = path.join(dir, `${base}.bak${i + 1}`);
try {
await fs.rename(src, dest);
} catch (error) {
if ((error as NodeJS.ErrnoException).code !== "ENOENT") {
throw error;
}
}
}
try {
await fs.copyFile(dbPath, path.join(dir, `${base}.bak0`));
} catch (error) {
if ((error as NodeJS.ErrnoException).code !== "ENOENT") {
throw error;
}
}
};
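// Write to a temp file and rename so db.json is replaced atomically, even if the process dies mid-write.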
const writeRaw = async (data: DbSchema) => {
await rotateBackups(config.dbPath);
const tempPath = `${config.dbPath}.tmp`;
await fs.writeFile(tempPath, JSON.stringify(data, null, 2), "utf-8");
await fs.rename(tempPath, config.dbPath);
};
export const readDb = async (): Promise<DbSchema> => {
return mutex.run(async () => {
try {
const content = await fs.readFile(config.dbPath, "utf-8");
const parsed = JSON.parse(content) as DbSchema;
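// Backfill timer fields for databases created before those features existed.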
parsed.timerRules ??= [];
parsed.timerLogs ??= [];
parsed.timerSummary ??= {
totalDeleted: 0,
totalSeededSeconds: 0,
totalUploadedBytes: 0,
updatedAt: new Date().toISOString(),
};
return parsed;
} catch (error) {
if ((error as NodeJS.ErrnoException).code !== "ENOENT") {
throw error;
}
const initial = defaultDb();
await writeRaw(initial);
return initial;
}
});
};
export const writeDb = async (data: DbSchema): Promise<void> => {
await mutex.run(async () => {
await writeRaw(data);
});
};

apps/server/src/storage/mutex.ts Normal file

@@ -0,0 +1,12 @@
export class Mutex {
private current: Promise<void> = Promise.resolve();
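// Serialize tasks by chaining each onto the previous promise; a failed task does not block the queue,
// and its error still reaches the caller through the returned promise.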
async run<T>(fn: () => Promise<T>): Promise<T> {
const next = this.current.then(fn, fn);
this.current = next.then(
() => undefined,
() => undefined
);
return next;
}
}

apps/server/src/storage/paths.ts Normal file

@@ -0,0 +1,7 @@
import fs from "node:fs/promises";
import { config } from "../config";
export const ensureDataPaths = async () => {
await fs.mkdir(config.dataDir, { recursive: true });
await fs.mkdir(config.torrentArchiveDir, { recursive: true });
};

apps/server/src/timer/timer.routes.ts Normal file

@@ -0,0 +1,58 @@
import { Router } from "express";
import { randomUUID } from "node:crypto";
import { readDb, writeDb } from "../storage/jsondb";
import { TimerRule } from "../types";
import { nowIso } from "../utils/time";
import { z } from "zod";
const router = Router();
const ruleSchema = z.object({
tags: z.array(z.string().min(1)).min(1),
seedLimitSeconds: z.number().int().min(60).max(60 * 60 * 24 * 365),
});
router.get("/rules", async (_req, res) => {
const db = await readDb();
res.json(db.timerRules ?? []);
});
router.post("/rules", async (req, res) => {
const parsed = ruleSchema.safeParse(req.body);
if (!parsed.success) {
return res.status(400).json({ error: parsed.error.flatten() });
}
const db = await readDb();
const rule: TimerRule = {
id: randomUUID(),
tags: parsed.data.tags,
seedLimitSeconds: parsed.data.seedLimitSeconds,
createdAt: nowIso(),
};
db.timerRules = [...(db.timerRules ?? []), rule];
await writeDb(db);
res.json(rule);
});
router.delete("/rules/:ruleId", async (req, res) => {
const db = await readDb();
const next = (db.timerRules ?? []).filter((rule) => rule.id !== req.params.ruleId);
if (next.length === (db.timerRules ?? []).length) {
return res.status(404).json({ error: "Rule not found" });
}
db.timerRules = next;
await writeDb(db);
res.json({ ok: true });
});
router.get("/logs", async (_req, res) => {
const db = await readDb();
res.json(db.timerLogs ?? []);
});
router.get("/summary", async (_req, res) => {
const db = await readDb();
res.json(db.timerSummary ?? null);
});
export default router;


@@ -0,0 +1,4 @@
export interface TimerRuleInput {
tags: string[];
seedLimitSeconds: number;
}

apps/server/src/timer/timer.worker.ts Normal file

@@ -0,0 +1,94 @@
import { randomUUID } from "node:crypto";
import { QbitClient } from "../qbit/qbit.client";
import { readDb, writeDb } from "../storage/jsondb";
import { TimerLog, TimerSummary } from "../types";
import { emitTimerLog, emitTimerSummary } from "../realtime/emitter";
import { nowIso } from "../utils/time";
const MAX_LOGS = 2000;
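// Merge a torrent's tags and category into one lowercase set so a rule can match either.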
const normalizeTags = (tags?: string, category?: string) => {
const tagList = tags ? tags.split(",").map((tag) => tag.trim()).filter(Boolean) : [];
if (category) {
tagList.push(category);
}
return Array.from(new Set(tagList.map((tag) => tag.toLowerCase())));
};
export const startTimerWorker = (qbit: QbitClient, intervalMs: number) => {
setInterval(async () => {
try {
const db = await readDb();
const rules = db.timerRules ?? [];
if (rules.length === 0) {
return;
}
const torrents = await qbit.getTorrentsInfo();
let summary: TimerSummary =
db.timerSummary ?? {
totalDeleted: 0,
totalSeededSeconds: 0,
totalUploadedBytes: 0,
updatedAt: nowIso(),
};
const logs: TimerLog[] = [];
for (const torrent of torrents) {
const tags = normalizeTags(torrent.tags, torrent.category);
const addedOnMs = Number(torrent.added_on ?? 0) * 1000;
const matchingRules = rules.filter((rule) => {
const ruleCreatedAtMs = Date.parse(rule.createdAt);
if (Number.isFinite(ruleCreatedAtMs) && addedOnMs > 0) {
if (addedOnMs < ruleCreatedAtMs) {
return false;
}
}
return rule.tags.some((tag) => tags.includes(tag.toLowerCase()));
});
if (matchingRules.length === 0) {
continue;
}
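// When several rules match, enforce the strictest (shortest) seed limit.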
const matchingRule = matchingRules.reduce((best, current) =>
current.seedLimitSeconds < best.seedLimitSeconds ? current : best
);
const seedingSeconds = Number(torrent.seeding_time ?? 0);
if (seedingSeconds < matchingRule.seedLimitSeconds) {
continue;
}
try {
await qbit.deleteTorrent(torrent.hash, true);
} catch (error) {
continue;
}
const logEntry: TimerLog = {
id: randomUUID(),
hash: torrent.hash,
name: torrent.name,
sizeBytes: torrent.size,
tracker: torrent.tracker,
tags,
category: torrent.category,
seedingTimeSeconds: seedingSeconds,
uploadedBytes: torrent.uploaded ?? 0,
deletedAt: nowIso(),
};
logs.push(logEntry);
summary = {
totalDeleted: summary.totalDeleted + 1,
totalSeededSeconds: summary.totalSeededSeconds + seedingSeconds,
totalUploadedBytes: summary.totalUploadedBytes + (torrent.uploaded ?? 0),
updatedAt: nowIso(),
};
emitTimerLog(logEntry);
emitTimerSummary(summary);
}
if (logs.length > 0) {
db.timerLogs = [...(db.timerLogs ?? []), ...logs].slice(-MAX_LOGS);
db.timerSummary = summary;
await writeDb(db);
}
} catch (error) {
// Keep the timer worker alive on errors, matching the other background workers.
}
}, intervalMs);
};

apps/server/src/torrent/torrent.archive.ts Normal file

@@ -0,0 +1,46 @@
import fs from "node:fs/promises";
import path from "node:path";
import { readDb, writeDb } from "../storage/jsondb";
import { ArchiveStatus } from "../types";
import { nowIso } from "../utils/time";
import { config } from "../config";
export const setArchiveStatus = async (status: ArchiveStatus) => {
const db = await readDb();
db.archives[status.hash] = status;
await writeDb(db);
return status;
};
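// Reconcile stored state with the file on disk: if the .torrent file exists, promote the status to READY.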
export const getArchiveStatus = async (hash: string) => {
const db = await readDb();
const existing = db.archives[hash];
const torrentPath = path.join(config.torrentArchiveDir, `${hash}.torrent`);
try {
await fs.access(torrentPath);
if (!existing || existing.status !== "READY") {
const updated: ArchiveStatus = {
hash,
status: "READY",
torrentFilePath: torrentPath,
source: existing?.source ?? "manual",
updatedAt: nowIso(),
};
db.archives[hash] = updated;
await writeDb(db);
return updated;
}
} catch (error) {
// File does not exist; fall back to stored status.
}
return existing;
};
export const createPendingArchive = async (hash: string) => {
const status: ArchiveStatus = {
hash,
status: "PENDING",
updatedAt: nowIso(),
};
return setArchiveStatus(status);
};


@@ -0,0 +1,41 @@
import fs from "node:fs/promises";
import path from "node:path";
import { config } from "../config";
import { logger } from "../utils/logger";
export const generateTorrentFile = async (
magnet: string,
hash: string
): Promise<string> => {
const targetPath = path.join(config.torrentArchiveDir, `${hash}.torrent`);
const { default: WebTorrent } = await import("webtorrent");
const client = new WebTorrent();
return new Promise((resolve, reject) => {
const torrent = client.add(magnet, { path: config.dataDir });
const timeout = setTimeout(() => {
client.destroy();
reject(new Error("Metadata fetch timeout"));
}, 120_000);
torrent.on("metadata", async () => {
clearTimeout(timeout);
try {
const buffer = torrent.torrentFile;
await fs.writeFile(targetPath, buffer);
resolve(targetPath);
} catch (error) {
reject(error);
} finally {
client.destroy();
}
});
torrent.on("error", (error) => {
logger.error({ error }, "Torrent metadata error");
clearTimeout(timeout);
client.destroy();
reject(error);
});
});
};

apps/server/src/torrent/torrent.routes.ts Normal file

@@ -0,0 +1,121 @@
import { Router } from "express";
import multer from "multer";
import path from "node:path";
import fs from "node:fs/promises";
import { getQbitClient } from "../qbit/qbit.context";
import { getArchiveStatus, setArchiveStatus } from "./torrent.archive";
import { nowIso } from "../utils/time";
import { appendAuditLog, logger } from "../utils/logger";
import { config } from "../config";
const router = Router();
const upload = multer({ dest: "/tmp" });
router.post("/select", async (req, res) => {
const { hash } = req.body ?? {};
if (!hash) {
return res.status(400).json({ error: "Missing hash" });
}
const existing = await getArchiveStatus(hash);
if (existing?.status === "READY") {
return res.json({ ok: true, hash, archive: existing });
}
const qbit = getQbitClient();
const torrents = await qbit.getTorrentsInfo();
const torrent = torrents.find((t) => t.hash === hash);
if (!torrent) {
return res.status(404).json({ error: "Torrent not found" });
}
await setArchiveStatus({
hash,
status: "MISSING",
updatedAt: nowIso(),
});
res.json({ ok: true, hash, archive: { status: "MISSING" } });
});
router.post("/archive/from-selected", async (req, res) => {
const { hash } = req.body ?? {};
if (!hash) {
return res.status(400).json({ error: "Missing hash" });
}
const existing = await getArchiveStatus(hash);
if (existing?.status === "READY") {
return res.json({ ok: true, torrentFilePath: existing.torrentFilePath, source: existing.source });
}
await setArchiveStatus({
hash,
status: "MISSING",
lastError: "Magnet export disabled; upload .torrent manually.",
updatedAt: nowIso(),
});
await appendAuditLog({
level: "WARN",
event: "ARCHIVE_FAIL",
message: `Archive generation disabled for ${hash}; manual upload required`,
});
return res.status(400).json({ error: "Magnet export disabled; upload .torrent manually." });
});
router.post("/archive/upload", upload.single("file"), async (req, res) => {
const { hash } = req.body ?? {};
if (!hash || !req.file) {
return res.status(400).json({ error: "Missing hash or file" });
}
const inputHash = String(hash).toLowerCase();
const buffer = await fs.readFile(req.file.path);
let warning: string | undefined;
try {
const { default: parseTorrent } = await import("parse-torrent");
// parse-torrent v10+ returns a Promise; awaiting is also harmless for older synchronous versions.
const parsed = await parseTorrent(buffer);
const infoHash = String(parsed.infoHash ?? "").toLowerCase();
if (infoHash && infoHash !== inputHash) {
await fs.unlink(req.file.path);
return res.status(400).json({
error: "Torrent hash uyuşmuyor. Doğru .torrent dosyasını seçin.",
expected: inputHash,
actual: infoHash,
});
}
} catch (error) {
warning = "Torrent dosyası okunamadı; yine de arşive kaydedildi.";
logger.warn({ error, hash: inputHash }, "Torrent parse failed; storing archive anyway");
}
const targetPath = path.join(config.torrentArchiveDir, `${hash}.torrent`);
await fs.writeFile(targetPath, buffer);
await fs.unlink(req.file.path);
await setArchiveStatus({
hash,
status: "READY",
torrentFilePath: targetPath,
source: "manual",
updatedAt: nowIso(),
});
try {
const qbit = getQbitClient();
await qbit.addTorrentByFile(targetPath);
return res.json({
ok: true,
torrentFilePath: targetPath,
added: true,
warning,
});
} catch (error) {
return res.json({
ok: true,
torrentFilePath: targetPath,
added: false,
warning,
});
}
});
router.get("/archive/status/:hash", async (req, res) => {
const status = await getArchiveStatus(req.params.hash);
if (!status) {
return res.json({ hash: req.params.hash, status: "MISSING" });
}
return res.json(status);
});
export default router;

apps/server/src/types.ts Normal file

@@ -0,0 +1,131 @@
export type LoopStatus =
| "IDLE"
| "RUNNING"
| "WAITING_DELAY"
| "STOPPED"
| "ERROR"
| "COMPLETED";
export type EnforcementMode = "aggressive-soft";
export interface User {
username: string;
passwordHash: string;
createdAt: string;
}
export interface Settings {
pollIntervalMs: number;
enforceIntervalMs: number;
defaultDelayMs: number;
maxLoopLimit: number;
}
export interface LoopJob {
id: string;
torrentHash: string;
name: string;
sizeBytes: number;
magnet?: string;
torrentFilePath?: string;
allowIp: string;
targetLoops: number;
doneLoops: number;
delayMs: number;
deleteDataBetweenLoops: boolean;
enforcementMode: EnforcementMode;
status: LoopStatus;
currentRun?: {
startedAt: string;
lastProgress: number;
lastProgressAt?: string;
stalledSince?: string;
downloadedThisRunBytes: number;
avgSpeed: number;
};
totals: {
totalDownloadedBytes: number;
totalTimeMs: number;
};
bans: {
bannedIps: string[];
lastBanAt?: string;
};
nextRunAt?: string;
createdAt: string;
updatedAt: string;
lastError?: string;
}
export interface Profile {
id: string;
name: string;
allowIp: string;
delayMs: number;
targetLoops: number;
createdAt: string;
}
export interface TimerRule {
id: string;
tags: string[];
seedLimitSeconds: number;
createdAt: string;
}
export interface TimerLog {
id: string;
hash: string;
name: string;
sizeBytes: number;
tracker?: string;
tags: string[];
category?: string;
seedingTimeSeconds: number;
uploadedBytes: number;
deletedAt: string;
}
export interface TimerSummary {
totalDeleted: number;
totalSeededSeconds: number;
totalUploadedBytes: number;
updatedAt: string;
}
export interface AuditLog {
id: string;
level: "INFO" | "WARN" | "ERROR";
event:
| "JOB_STARTED"
| "JOB_COMPLETED_LOOP"
| "JOB_COMPLETED_ALL"
| "JOB_RESTARTED"
| "PEER_BANNED"
| "QBIT_RELOGIN"
| "ARCHIVE_SUCCESS"
| "ARCHIVE_FAIL";
message: string;
createdAt: string;
}
export interface ArchiveStatus {
hash: string;
status: "PENDING" | "READY" | "FAILED" | "MISSING";
torrentFilePath?: string;
source?: "manual" | "generated" | "exported";
lastError?: string;
updatedAt: string;
}
export interface DbSchema {
users: User[];
settings: Settings;
loopJobs: LoopJob[];
profiles: Profile[];
auditLogs: AuditLog[];
archives: Record<string, ArchiveStatus>;
timerRules?: TimerRule[];
timerLogs?: TimerLog[];
timerSummary?: TimerSummary;
}

apps/server/src/utils/logger.ts Normal file

@@ -0,0 +1,34 @@
import fs from "node:fs/promises";
import { randomUUID } from "node:crypto";
import pino from "pino";
import { config } from "../config";
import { AuditLog } from "../types";
export const logger = pino({
level: process.env.LOG_LEVEL ?? "info",
});
export const appendAuditLog = async (
entry: Omit<AuditLog, "id" | "createdAt">
) => {
const logEntry: AuditLog = {
id: randomUUID(),
createdAt: new Date().toISOString(),
...entry,
};
try {
let existing: AuditLog[] = [];
try {
const content = await fs.readFile(config.logsPath, "utf-8");
existing = JSON.parse(content) as AuditLog[];
} catch (error) {
if ((error as NodeJS.ErrnoException).code !== "ENOENT") {
throw error;
}
}
const next = [...existing, logEntry].slice(-2000);
await fs.writeFile(config.logsPath, JSON.stringify(next, null, 2), "utf-8");
} catch (error) {
logger.error({ error }, "Failed to append audit log");
}
};

apps/server/src/utils/time.ts Normal file

@@ -0,0 +1,7 @@
export const nowIso = () => new Date().toISOString();
export const secondsBetween = (startIso: string, endIso: string) => {
const start = new Date(startIso).getTime();
const end = new Date(endIso).getTime();
return Math.max(0, (end - start) / 1000);
};

apps/server/src/utils/validators.ts Normal file

@@ -0,0 +1,25 @@
import { z } from "zod";
export const allowIpSchema = z
.string()
.ip({ version: "v4" })
.or(z.string().ip({ version: "v6" }));
export const loopStartSchema = z.object({
hash: z.string().min(1),
allowIp: allowIpSchema,
targetLoops: z.number().int().min(1).max(1000),
delayMs: z.number().int().min(0).max(86_400_000),
});
export const dryRunSchema = z.object({
hash: z.string().min(1),
allowIp: allowIpSchema,
});
export const profileSchema = z.object({
name: z.string().min(1).max(64),
allowIp: allowIpSchema,
delayMs: z.number().int().min(0).max(86_400_000),
targetLoops: z.number().int().min(1).max(1000),
});