// ratebubble/src/services/admin.service.ts
import prisma from '../config/database.js';
import redis from '../config/redis.js';
import { env } from '../config/env.js';
import { JobService } from './job.service.js';
import { MetricsService } from './metrics.service.js';
import { CacheService } from './cache.service.js';
import { ContentService } from './content.service.js';
import type { AdminActionResponse, AdminOverviewResponse } from '../types/index.js';
import { parseSupportedContentUrl } from '../utils/contentUrl.js';
// Prefix under which all content cache entries live in Redis.
const CACHE_PREFIX = 'content:';
// Upper bound on keys analyzed per overview request, so a huge keyspace
// cannot stall the admin dashboard.
const MAX_CACHE_KEYS_FOR_ANALYSIS = 1000;

/**
 * Strips the cache prefix from a Redis key for display.
 *
 * Fix: the previous `key.replace(CACHE_PREFIX, '')` removed the FIRST
 * occurrence of the prefix anywhere in the string, not only a leading one —
 * a key like `x:content:y` would be mangled to `x:y`. Only strip when the
 * key actually starts with the prefix; otherwise return it unchanged.
 */
function formatCacheKeyLabel(key: string): string {
  return key.startsWith(CACHE_PREFIX) ? key.slice(CACHE_PREFIX.length) : key;
}

/**
 * Parses a cache key of the form `content:<provider>:<id>` into its
 * provider/id pair. Only `netflix` and `primevideo` providers with
 * alphanumeric ids are recognized; anything else yields null.
 */
function extractProviderIdFromCacheKey(key: string): { provider: string; id: string } | null {
  const normalized = formatCacheKeyLabel(key);
  const match = normalized.match(/^(netflix|primevideo):([A-Za-z0-9]+)$/);
  if (!match) return null;
  const provider = match[1];
  const id = match[2];
  // Defensive: regex groups are always set on a match, but keep the guard
  // so the return type stays narrow without assertions.
  if (!provider || !id) return null;
  return { provider, id };
}
/**
 * Resolves a supported content URL into its provider/id pair, or null when
 * the URL is not recognized by the shared URL parser.
 */
function extractProviderIdFromUrl(url: string): { provider: string; id: string } | null {
  const parsed = parseSupportedContentUrl(url);
  return parsed ? { provider: parsed.provider, id: parsed.id } : null;
}
/**
 * Extracts a single integer field from a Redis INFO payload.
 *
 * The payload is line-oriented (`field:value`, typically CRLF-terminated);
 * the first line whose trimmed form starts with `<key>:` wins. Returns null
 * when the field is absent or its value is not parseable as an integer.
 */
function parseRedisInfoValue(info: string, key: string): number | null {
  const prefix = `${key}:`;
  for (const rawLine of info.split('\n')) {
    const line = rawLine.trim();
    if (!line.startsWith(prefix)) continue;
    const parsed = Number.parseInt(line.slice(prefix.length).trim(), 10);
    return Number.isFinite(parsed) ? parsed : null;
  }
  return null;
}
/**
 * Collects all Redis keys under the content cache prefix via cursor-based
 * SCAN (non-blocking, batches of ~200).
 *
 * When `limit` is provided and reached, scanning stops early and the result
 * is truncated to `limit` keys with `sampled: true`; otherwise the full
 * keyspace is returned with `sampled: false`.
 */
async function collectCacheKeys(limit?: number): Promise<{ keys: string[]; sampled: boolean }> {
  const collected: string[] = [];
  let cursor = '0';
  for (;;) {
    const [nextCursor, batch] = await redis.scan(
      cursor,
      'MATCH',
      `${CACHE_PREFIX}*`,
      'COUNT',
      200
    );
    collected.push(...batch);
    // Check the cap before testing the cursor so a final batch that crosses
    // the limit is still reported as sampled.
    if (limit && collected.length >= limit) {
      return { keys: collected.slice(0, limit), sampled: true };
    }
    cursor = nextCursor;
    if (cursor === '0') break;
  }
  return { keys: collected, sampled: false };
}
/**
 * Administrative service: aggregates the dashboard overview and performs
 * maintenance actions against the database and the Redis cache.
 * All members are static; the class is used purely as a namespace.
 */
export class AdminService {
  /**
   * Assembles the admin overview payload in one pass:
   * - content counts and metadata-gap counts from Prisma
   * - scrape-job status counts, recent failures, and average duration
   * - Redis cache analysis (TTL buckets, byte sizes, freshest entries)
   * - in-process request metrics and Redis memory usage
   */
  static async getOverview(): Promise<AdminOverviewResponse> {
    const now = new Date();
    // Rolling windows for "recently added" content counts.
    const oneDayAgo = new Date(now.getTime() - 24 * 60 * 60 * 1000);
    const sevenDaysAgo = new Date(now.getTime() - 7 * 24 * 60 * 60 * 1000);
    // All independent reads run concurrently; the destructuring order must
    // stay in sync with the array order below.
    const [
      totalContent,
      recent24h,
      recent7d,
      missingPlot,
      missingAgeRating,
      missingBackdrop,
      groupedTypes,
      groupedJobs,
      recentFailedJobs,
      recentFinishedJobs,
      topGenreLinks,
      { keys: cacheKeys, sampled: cacheSampled },
      metricsSnapshot,
      redisMemoryInfo,
    ] = await Promise.all([
      prisma.content.count(),
      prisma.content.count({ where: { createdAt: { gte: oneDayAgo } } }),
      prisma.content.count({ where: { createdAt: { gte: sevenDaysAgo } } }),
      prisma.content.count({ where: { plot: null } }),
      prisma.content.count({ where: { ageRating: null } }),
      prisma.content.count({ where: { backdropUrl: null } }),
      prisma.content.groupBy({ by: ['type'], _count: { type: true } }),
      prisma.scrapeJob.groupBy({ by: ['status'], _count: { status: true } }),
      // Most recent failed jobs, surfaced verbatim on the dashboard.
      prisma.scrapeJob.findMany({
        where: { status: 'failed' },
        orderBy: { updatedAt: 'desc' },
        take: 8,
        select: { id: true, url: true, error: true, updatedAt: true },
      }),
      // Sample of finished jobs used below to estimate average duration.
      prisma.scrapeJob.findMany({
        where: { status: { in: ['completed', 'failed'] } },
        orderBy: { updatedAt: 'desc' },
        take: 300,
        select: { createdAt: true, updatedAt: true },
      }),
      prisma.contentGenre.groupBy({
        by: ['genreId'],
        _count: { genreId: true },
        orderBy: { _count: { genreId: 'desc' } },
        take: 10,
      }),
      // Key scan is capped so a huge cache keyspace cannot stall the overview.
      collectCacheKeys(MAX_CACHE_KEYS_FOR_ANALYSIS),
      MetricsService.getSnapshot(),
      // Memory info is best-effort; an empty string maps to 0/null below.
      redis.info('memory').catch(() => ''),
    ]);
    // Resolve genre ids from the groupBy result into display names.
    const genreIds = topGenreLinks.map((item) => item.genreId);
    const genres = genreIds.length
      ? await prisma.genre.findMany({
          where: { id: { in: genreIds } },
          select: { id: true, name: true },
        })
      : [];
    const genreMap = new Map(genres.map((genre) => [genre.id, genre.name]));
    // Batch per-key TTL, size, and value reads through pipelines so the
    // number of Redis round trips stays constant regardless of key count.
    const ttlPipeline = redis.pipeline();
    const sizePipeline = redis.pipeline();
    const valuePipeline = redis.pipeline();
    for (const key of cacheKeys) {
      ttlPipeline.ttl(key);
      sizePipeline.strlen(key);
      valuePipeline.get(key);
    }
    const [ttlResults, sizeResults, valueResults] = await Promise.all([
      ttlPipeline.exec(),
      sizePipeline.exec(),
      valuePipeline.exec(),
    ]);
    // Histogram buckets for remaining TTLs (in seconds).
    const ttlDistribution = {
      expiredOrNoTtl: 0,
      lessThan5Min: 0,
      min5To30: 0,
      min30Plus: 0,
    };
    // Unique provider:id pairs derived from the cache keys; used to look up
    // human-readable titles for the "expiring soon" list.
    const cacheProviderIds = Array.from(
      new Set(
        cacheKeys
          .map((key) => extractProviderIdFromCacheKey(key))
          .filter((item): item is { provider: string; id: string } => Boolean(item))
          .map((item) => `${item.provider}:${item.id}`)
      )
    );
    // Match the provider id against the stored content URL:
    // primevideo URLs embed `/detail/<id>`, netflix URLs embed `/title/<id>`.
    const relatedContent = cacheProviderIds.length
      ? await prisma.content.findMany({
          where: {
            OR: cacheProviderIds.map((providerId) => {
              const [provider, id] = providerId.split(':');
              if (provider === 'primevideo') {
                return { url: { contains: `/detail/${id}` } };
              }
              return { url: { contains: `/title/${id}` } };
            }),
          },
          select: {
            url: true,
            title: true,
          },
        })
      : [];
    // provider:id -> title; first match wins when several rows share an id.
    const titleMap = new Map<string, string>();
    for (const item of relatedContent) {
      const parsed = extractProviderIdFromUrl(item.url);
      if (parsed) {
        const key = `${parsed.provider}:${parsed.id}`;
        if (!titleMap.has(key)) {
          titleMap.set(key, item.title);
        }
      }
    }
    const expiringSoon: {
      key: string;
      mediaTitle?: string | null;
      cachedAt?: number | null;
      ttlSeconds: number;
    }[] = [];
    let totalBytes = 0;
    // The three result arrays are index-aligned with `cacheKeys`.
    for (let i = 0; i < cacheKeys.length; i += 1) {
      // Pipeline entries are [error, result] tuples (ioredis convention);
      // index 1 holds the value. Missing entries default to TTL -2 / size 0.
      const ttlValue = Number(ttlResults?.[i]?.[1] ?? -2);
      const sizeValue = Number(sizeResults?.[i]?.[1] ?? 0);
      const safeSize = Number.isFinite(sizeValue) ? Math.max(0, sizeValue) : 0;
      totalBytes += safeSize;
      if (ttlValue <= 0) {
        // Covers both "no TTL set" (-1) and "key missing" (-2).
        ttlDistribution.expiredOrNoTtl += 1;
      } else if (ttlValue < 300) {
        ttlDistribution.lessThan5Min += 1;
      } else if (ttlValue <= 1800) {
        ttlDistribution.min5To30 += 1;
      } else {
        ttlDistribution.min30Plus += 1;
      }
      if (ttlValue > 0) {
        const formattedKey = formatCacheKeyLabel(cacheKeys[i] || '');
        const providerId = extractProviderIdFromCacheKey(cacheKeys[i] || '');
        const rawValue = valueResults?.[i]?.[1];
        // Cached payloads may embed a numeric `cachedAt` timestamp; tolerate
        // malformed JSON rather than failing the whole overview.
        let cachedAt: number | null = null;
        if (typeof rawValue === 'string') {
          try {
            const parsed = JSON.parse(rawValue) as { cachedAt?: unknown };
            cachedAt = typeof parsed.cachedAt === 'number' ? parsed.cachedAt : null;
          } catch {
            cachedAt = null;
          }
        }
        expiringSoon.push({
          key: formattedKey,
          mediaTitle: providerId
            ? titleMap.get(`${providerId.provider}:${providerId.id}`) ?? null
            : null,
          cachedAt,
          ttlSeconds: ttlValue,
        });
      }
    }
    // Newest cache entries first; ties broken by longer remaining TTL.
    expiringSoon.sort((a, b) => {
      const aCachedAt = a.cachedAt ?? 0;
      const bCachedAt = b.cachedAt ?? 0;
      if (aCachedAt !== bCachedAt) return bCachedAt - aCachedAt;
      return b.ttlSeconds - a.ttlSeconds;
    });
    // Flatten the job-status groupBy into a fixed-shape counter object;
    // unknown statuses are silently ignored.
    const jobCounts = {
      pending: 0,
      processing: 0,
      completed: 0,
      failed: 0,
    };
    for (const row of groupedJobs) {
      if (row.status in jobCounts) {
        jobCounts[row.status as keyof typeof jobCounts] = row._count.status;
      }
    }
    // Same flattening for content types.
    const contentByType = {
      movie: 0,
      tvshow: 0,
    };
    for (const row of groupedTypes) {
      if (row.type in contentByType) {
        contentByType[row.type as keyof typeof contentByType] = row._count.type;
      }
    }
    // Mean createdAt->updatedAt span across the sampled finished jobs;
    // negative spans (clock skew) are clamped to zero.
    const averageDurationMs =
      recentFinishedJobs.length === 0
        ? 0
        : Math.round(
            recentFinishedJobs.reduce((sum, job) => {
              const duration = job.updatedAt.getTime() - job.createdAt.getTime();
              return sum + Math.max(0, duration);
            }, 0) / recentFinishedJobs.length
          );
    const totalCacheLookups = metricsSnapshot.cacheHits + metricsSnapshot.cacheMisses;
    // Hit rate rounded to 4 decimal places; 0 when there were no lookups.
    const cacheHitRate = totalCacheLookups
      ? Number((metricsSnapshot.cacheHits / totalCacheLookups).toFixed(4))
      : 0;
    const redisUsedBytes = parseRedisInfoValue(redisMemoryInfo, 'used_memory') ?? 0;
    const redisMaxBytesRaw = parseRedisInfoValue(redisMemoryInfo, 'maxmemory');
    // Redis reports maxmemory=0 for "unlimited"; surface that as null.
    const redisMaxBytes = redisMaxBytesRaw && redisMaxBytesRaw > 0 ? redisMaxBytesRaw : null;
    return {
      generatedAt: now.toISOString(),
      environment: env.NODE_ENV,
      cache: {
        configuredTtlSeconds: env.REDIS_TTL_SECONDS,
        keyCount: cacheKeys.length,
        analyzedKeyLimit: MAX_CACHE_KEYS_FOR_ANALYSIS,
        sampled: cacheSampled,
        totalBytes,
        redisMemory: {
          usedBytes: redisUsedBytes,
          maxBytes: redisMaxBytes,
        },
        ttlDistribution,
        // Only the 10 freshest live entries are surfaced.
        expiringSoon: expiringSoon.slice(0, 10),
      },
      content: {
        total: totalContent,
        byType: contentByType,
        addedLast24h: recent24h,
        addedLast7d: recent7d,
        metadataGaps: {
          missingPlot,
          missingAgeRating,
          missingBackdrop,
        },
        topGenres: topGenreLinks.map((item) => ({
          name: genreMap.get(item.genreId) ?? 'Unknown',
          count: item._count.genreId,
        })),
      },
      jobs: {
        counts: jobCounts,
        averageDurationMs,
        failedRecent: recentFailedJobs.map((job) => ({
          id: job.id,
          url: job.url,
          error: job.error ?? 'Unknown error',
          updatedAt: job.updatedAt.toISOString(),
        })),
      },
      requestMetrics: {
        cacheHits: metricsSnapshot.cacheHits,
        cacheMisses: metricsSnapshot.cacheMisses,
        cacheHitRate,
        sourceCounts: metricsSnapshot.bySource,
      },
    };
  }

  /**
   * Deletes every cache key under the content prefix.
   * Uses an uncapped scan (no limit) so nothing is missed.
   */
  static async clearCache(): Promise<AdminActionResponse> {
    const { keys } = await collectCacheKeys();
    if (keys.length === 0) {
      return {
        queued: 0,
        skipped: 0,
        details: 'No cache keys matched prefix',
      };
    }
    // NOTE(review): DEL with a spread of all keys sends one very large
    // command; consider chunking if the keyspace grows substantially.
    await redis.del(...keys);
    return {
      queued: keys.length,
      skipped: 0,
      details: 'Cache keys deleted',
    };
  }

  /**
   * Rebuilds the Redis cache from the database: loads every content row with
   * its genres and cast, converts each to the public API shape, and writes it
   * back under its URL key. Writes are sequential, one row at a time.
   */
  static async warmupCacheFromDatabase(): Promise<AdminActionResponse> {
    const allContent = await prisma.content.findMany({
      include: {
        genres: {
          include: {
            genre: true,
          },
        },
        castMembers: {
          orderBy: { name: 'asc' },
        },
      },
      orderBy: { createdAt: 'desc' },
    });
    let queued = 0;
    for (const item of allContent) {
      // Normalize the DB row into the API response shape before caching so
      // cached entries match what the content endpoint would serve.
      const apiPayload = ContentService.toApiResponse({
        id: item.id,
        url: item.url,
        title: item.title,
        year: item.year,
        plot: item.plot,
        backdropUrl: item.backdropUrl,
        ageRating: item.ageRating,
        type: item.type as 'movie' | 'tvshow',
        currentSeason: item.currentSeason,
        genres: item.genres.map((g) => g.genre.name),
        cast: item.castMembers.map((c) => c.name),
        createdAt: item.createdAt,
        updatedAt: item.updatedAt,
      });
      await CacheService.set(item.url, apiPayload);
      queued += 1;
    }
    return {
      queued,
      skipped: 0,
      details: 'Database content written to Redis cache',
    };
  }

  /**
   * Re-queues up to `limit` recently failed scrape jobs.
   * URLs are deduplicated, and a URL with an already pending/processing job
   * is skipped rather than queued twice. Job processing is fire-and-forget.
   */
  static async retryFailedJobs(limit: number): Promise<AdminActionResponse> {
    const failedJobs = await prisma.scrapeJob.findMany({
      where: { status: 'failed' },
      orderBy: { updatedAt: 'desc' },
      take: limit,
      select: { url: true },
    });
    let queued = 0;
    let skipped = 0;
    const uniqueUrls = Array.from(new Set(failedJobs.map((job) => job.url)));
    for (const url of uniqueUrls) {
      const activeJob = await prisma.scrapeJob.findFirst({
        where: { url, status: { in: ['pending', 'processing'] } },
        select: { id: true },
      });
      if (activeJob) {
        skipped += 1;
        continue;
      }
      const job = await JobService.create(url);
      // Deliberately not awaited: the retry runs in the background and its
      // outcome is reflected in the job's status row.
      JobService.process(job.id).catch(() => {
        // async retry failures are reflected in job status
      });
      queued += 1;
    }
    return {
      queued,
      skipped,
      details: 'Failed jobs retried',
    };
  }

  /**
   * Queues re-scrapes for up to `limit` content rows not updated in the last
   * `days` days (oldest first). URLs with an active job are skipped.
   * Job processing is fire-and-forget, mirroring retryFailedJobs.
   */
  static async refreshStaleContent(days: number, limit: number): Promise<AdminActionResponse> {
    const threshold = new Date(Date.now() - days * 24 * 60 * 60 * 1000);
    const staleContent = await prisma.content.findMany({
      where: { updatedAt: { lt: threshold } },
      orderBy: { updatedAt: 'asc' },
      take: limit,
      select: { url: true },
    });
    let queued = 0;
    let skipped = 0;
    for (const item of staleContent) {
      const activeJob = await prisma.scrapeJob.findFirst({
        where: { url: item.url, status: { in: ['pending', 'processing'] } },
        select: { id: true },
      });
      if (activeJob) {
        skipped += 1;
        continue;
      }
      const job = await JobService.create(item.url);
      // Fire-and-forget; failures surface through the job's status.
      JobService.process(job.id).catch(() => {
        // async refresh failures are reflected in job status
      });
      queued += 1;
    }
    return {
      queued,
      skipped,
      details: `Stale content refresh queued for items older than ${days} days`,
    };
  }

  /**
   * Destructive: deletes ALL content and genre rows in one transaction, then
   * clears the Redis cache. Returns the pre-delete content count as `queued`.
   * (The `details` string is Turkish: "All content data deleted from the
   * database" — left as-is because it is a runtime value.)
   */
  static async purgeAllContent(): Promise<AdminActionResponse> {
    const totalContent = await prisma.content.count();
    await prisma.$transaction([
      prisma.content.deleteMany({}),
      prisma.genre.deleteMany({}),
    ]);
    await CacheService.clearAll();
    return {
      queued: totalContent,
      skipped: 0,
      details: 'Tum icerik verileri veritabanindan silindi',
    };
  }
}
export default AdminService;