feat(backend): realtime event system, admin API, and metrics infrastructure
- socket.ts: added the ContentRealtimeEvent, CacheRealtimeEvent, and MetricsRealtimeEvent
  types; added emitContentEvent / emitCacheEvent / emitMetricsEvent functions that
  broadcast to all connected clients (see the socket sketch below). The emitJobCompleted
  signature was tightened with GetInfoResponse + DataSource.
- auth.middleware.ts: replaced require()-based env access with a static import; added
  adminOnlyMiddleware for admin-only endpoints (X-API-Key !== API_KEY_ADMIN → 403;
  a sketch follows below).
- cache.service.ts: emitCacheEvent is now called after set / delete / clearAll, so cache
  mutations are broadcast in real time.
- content.service.ts: emitContentEvent is now called after create / update / delete, so
  DB writes are announced over Socket.IO.
- job.service.ts: integrated MetricsService into both the async and sync flows; cache
  hit/miss and source (cache/database/netflix) counters are incremented on every operation.
- types/index.ts: AdminOverviewResponse and AdminActionResponse types are now defined
  centrally.
- admin.service.ts (new): implemented the getOverview, clearCache, warmupCacheFromDatabase,
  retryFailedJobs, and refreshStaleContent operations. TTL/size analysis via Redis
  pipelines and the DB metrics are collected in parallel.
- metrics.service.ts (new): Redis hash-based cache hit/miss and source counters; a
  MetricsRealtimeEvent is published on every increment (see the sketch below).
- api.routes.ts: added the admin endpoints (route wiring sketched below):
  GET /api/admin/overview
  POST /api/admin/cache/clear
  POST /api/admin/cache/warmup
  POST /api/admin/jobs/retry-failed
  POST /api/admin/content/refresh-stale
Co-Authored-By: Claude Sonnet 4.6 <noreply@anthropic.com>
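A rough idea of the broadcast helpers described for socket.ts, assuming the Socket.IO server instance is registered once at startup; the event name, payload fields, and init function here are assumptions, not taken from the diff:

// socket.ts (sketch) — typed broadcast helper; event name and payload
// fields beyond what the commit message states are assumptions.
import type { Server } from 'socket.io';

export interface CacheRealtimeEvent {
  action: 'set' | 'delete' | 'clearAll';
  key?: string;
  timestamp: string;
}

let io: Server | null = null;

export function initSocket(server: Server): void {
  io = server; // assumed registration point at server startup
}

export function emitCacheEvent(event: CacheRealtimeEvent): void {
  io?.emit('cache:event', event); // broadcast to every connected client
}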
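The adminOnlyMiddleware check could look roughly like this, assuming an Express-style middleware and an env.API_KEY_ADMIN value exported from ../config/env.js; the error body shape is an assumption:

// auth.middleware.ts (sketch) — reject admin requests whose X-API-Key
// header does not match API_KEY_ADMIN.
import type { Request, Response, NextFunction } from 'express';
import { env } from '../config/env.js';

export function adminOnlyMiddleware(req: Request, res: Response, next: NextFunction): void {
  const apiKey = req.header('X-API-Key');
  if (apiKey !== env.API_KEY_ADMIN) {
    res.status(403).json({ error: 'Forbidden: admin API key required' }); // body shape assumed
    return;
  }
  next();
}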
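The Redis hash counters in metrics.service.ts might be sketched as follows; the hash key, field naming, and the emitMetricsEvent import path are assumptions, while the snapshot shape (cacheHits, cacheMisses, bySource) mirrors what admin.service.ts consumes in the diff below:

// metrics.service.ts (sketch) — counters kept in one Redis hash; every
// increment re-broadcasts a fresh snapshot over Socket.IO.
import redis from '../config/redis.js';
import { emitMetricsEvent } from '../config/socket.js'; // import path is an assumption

const METRICS_KEY = 'netflix:metrics'; // assumed hash key

export class MetricsService {
  static async increment(field: 'cacheHits' | 'cacheMisses' | `source:${string}`): Promise<void> {
    await redis.hincrby(METRICS_KEY, field, 1);
    emitMetricsEvent(await MetricsService.getSnapshot());
  }

  static async getSnapshot(): Promise<{
    cacheHits: number;
    cacheMisses: number;
    bySource: Record<string, number>;
  }> {
    const raw = await redis.hgetall(METRICS_KEY);
    const bySource: Record<string, number> = {};
    for (const [field, value] of Object.entries(raw)) {
      if (field.startsWith('source:')) bySource[field.slice('source:'.length)] = Number(value);
    }
    return {
      cacheHits: Number(raw.cacheHits ?? 0),
      cacheMisses: Number(raw.cacheMisses ?? 0),
      bySource,
    };
  }
}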
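Wiring for the admin endpoints listed above might look like this Express router sketch; the middleware import path and the default limit/days values are assumptions:

// api.routes.ts (sketch) — admin endpoints guarded by adminOnlyMiddleware.
import { Router } from 'express';
import { adminOnlyMiddleware } from '../middleware/auth.middleware.js'; // path assumed
import { AdminService } from '../services/admin.service.js';

const router = Router();

router.get('/api/admin/overview', adminOnlyMiddleware, async (_req, res) => {
  res.json(await AdminService.getOverview());
});

router.post('/api/admin/cache/clear', adminOnlyMiddleware, async (_req, res) => {
  res.json(await AdminService.clearCache());
});

router.post('/api/admin/cache/warmup', adminOnlyMiddleware, async (_req, res) => {
  res.json(await AdminService.warmupCacheFromDatabase());
});

router.post('/api/admin/jobs/retry-failed', adminOnlyMiddleware, async (req, res) => {
  const limit = Number(req.query.limit ?? 20); // default is an assumption
  res.json(await AdminService.retryFailedJobs(limit));
});

router.post('/api/admin/content/refresh-stale', adminOnlyMiddleware, async (req, res) => {
  const days = Number(req.query.days ?? 7);   // defaults are assumptions
  const limit = Number(req.query.limit ?? 50);
  res.json(await AdminService.refreshStaleContent(days, limit));
});

export default router;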
src/services/admin.service.ts (new file, 455 lines)
@@ -0,0 +1,455 @@
import prisma from '../config/database.js';
import redis from '../config/redis.js';
import { env } from '../config/env.js';
import { JobService } from './job.service.js';
import { MetricsService } from './metrics.service.js';
import { CacheService } from './cache.service.js';
import { ContentService } from './content.service.js';
import type { AdminActionResponse, AdminOverviewResponse } from '../types/index.js';

const CACHE_PREFIX = 'netflix:content:';
const MAX_CACHE_KEYS_FOR_ANALYSIS = 1000;

function formatCacheKeyLabel(key: string): string {
  return key.replace(CACHE_PREFIX, '');
}

function extractTitleIdFromCacheKey(key: string): string | null {
  const normalized = formatCacheKeyLabel(key);
  return /^\d+$/.test(normalized) ? normalized : null;
}

function extractTitleIdFromUrl(url: string): string | null {
  return url.match(/\/title\/(\d+)/)?.[1] ?? null;
}

function parseRedisInfoValue(info: string, key: string): number | null {
  const line = info
    .split('\n')
    .map((item) => item.trim())
    .find((item) => item.startsWith(`${key}:`));
  if (!line) return null;
  const raw = line.slice(key.length + 1).trim();
  const value = Number.parseInt(raw, 10);
  return Number.isFinite(value) ? value : null;
}

async function collectCacheKeys(limit?: number): Promise<{ keys: string[]; sampled: boolean }> {
  let cursor = '0';
  const keys: string[] = [];

  do {
    const [nextCursor, batchKeys] = await redis.scan(
      cursor,
      'MATCH',
      `${CACHE_PREFIX}*`,
      'COUNT',
      200
    );

    cursor = nextCursor;
    keys.push(...batchKeys);

    if (limit && keys.length >= limit) {
      return { keys: keys.slice(0, limit), sampled: true };
    }
  } while (cursor !== '0');

  return { keys, sampled: false };
}

export class AdminService {
  static async getOverview(): Promise<AdminOverviewResponse> {
    const now = new Date();
    const oneDayAgo = new Date(now.getTime() - 24 * 60 * 60 * 1000);
    const sevenDaysAgo = new Date(now.getTime() - 7 * 24 * 60 * 60 * 1000);

    const [
      totalContent,
      recent24h,
      recent7d,
      missingPlot,
      missingAgeRating,
      missingBackdrop,
      groupedTypes,
      groupedJobs,
      recentFailedJobs,
      recentFinishedJobs,
      topGenreLinks,
      { keys: cacheKeys, sampled: cacheSampled },
      metricsSnapshot,
      redisMemoryInfo,
    ] = await Promise.all([
      prisma.content.count(),
      prisma.content.count({ where: { createdAt: { gte: oneDayAgo } } }),
      prisma.content.count({ where: { createdAt: { gte: sevenDaysAgo } } }),
      prisma.content.count({ where: { plot: null } }),
      prisma.content.count({ where: { ageRating: null } }),
      prisma.content.count({ where: { backdropUrl: null } }),
      prisma.content.groupBy({ by: ['type'], _count: { type: true } }),
      prisma.scrapeJob.groupBy({ by: ['status'], _count: { status: true } }),
      prisma.scrapeJob.findMany({
        where: { status: 'failed' },
        orderBy: { updatedAt: 'desc' },
        take: 8,
        select: { id: true, url: true, error: true, updatedAt: true },
      }),
      prisma.scrapeJob.findMany({
        where: { status: { in: ['completed', 'failed'] } },
        orderBy: { updatedAt: 'desc' },
        take: 300,
        select: { createdAt: true, updatedAt: true },
      }),
      prisma.contentGenre.groupBy({
        by: ['genreId'],
        _count: { genreId: true },
        orderBy: { _count: { genreId: 'desc' } },
        take: 10,
      }),
      collectCacheKeys(MAX_CACHE_KEYS_FOR_ANALYSIS),
      MetricsService.getSnapshot(),
      redis.info('memory').catch(() => ''),
    ]);

    const genreIds = topGenreLinks.map((item) => item.genreId);
    const genres = genreIds.length
      ? await prisma.genre.findMany({
          where: { id: { in: genreIds } },
          select: { id: true, name: true },
        })
      : [];

    const genreMap = new Map(genres.map((genre) => [genre.id, genre.name]));

    const ttlPipeline = redis.pipeline();
    const sizePipeline = redis.pipeline();
    const valuePipeline = redis.pipeline();

    for (const key of cacheKeys) {
      ttlPipeline.ttl(key);
      sizePipeline.strlen(key);
      valuePipeline.get(key);
    }

    const [ttlResults, sizeResults, valueResults] = await Promise.all([
      ttlPipeline.exec(),
      sizePipeline.exec(),
      valuePipeline.exec(),
    ]);

    const ttlDistribution = {
      expiredOrNoTtl: 0,
      lessThan5Min: 0,
      min5To30: 0,
      min30Plus: 0,
    };

    const cacheTitleIds = Array.from(
      new Set(cacheKeys.map((key) => extractTitleIdFromCacheKey(key)).filter((id): id is string => Boolean(id)))
    );

    const relatedContent = cacheTitleIds.length
      ? await prisma.content.findMany({
          where: {
            OR: cacheTitleIds.map((id) => ({
              url: { contains: `/title/${id}` },
            })),
          },
          select: {
            url: true,
            title: true,
          },
        })
      : [];

    const titleMap = new Map<string, string>();
    for (const item of relatedContent) {
      const id = extractTitleIdFromUrl(item.url);
      if (id && !titleMap.has(id)) {
        titleMap.set(id, item.title);
      }
    }

    const expiringSoon: {
      key: string;
      mediaTitle?: string | null;
      cachedAt?: number | null;
      ttlSeconds: number;
    }[] = [];
    let totalBytes = 0;

    for (let i = 0; i < cacheKeys.length; i += 1) {
      const ttlValue = Number(ttlResults?.[i]?.[1] ?? -2);
      const sizeValue = Number(sizeResults?.[i]?.[1] ?? 0);
      const safeSize = Number.isFinite(sizeValue) ? Math.max(0, sizeValue) : 0;
      totalBytes += safeSize;

      if (ttlValue <= 0) {
        ttlDistribution.expiredOrNoTtl += 1;
      } else if (ttlValue < 300) {
        ttlDistribution.lessThan5Min += 1;
      } else if (ttlValue <= 1800) {
        ttlDistribution.min5To30 += 1;
      } else {
        ttlDistribution.min30Plus += 1;
      }

      if (ttlValue > 0) {
        const formattedKey = formatCacheKeyLabel(cacheKeys[i] || '');
        const titleId = extractTitleIdFromCacheKey(cacheKeys[i] || '');
        const rawValue = valueResults?.[i]?.[1];
        let cachedAt: number | null = null;
        if (typeof rawValue === 'string') {
          try {
            const parsed = JSON.parse(rawValue) as { cachedAt?: unknown };
            cachedAt = typeof parsed.cachedAt === 'number' ? parsed.cachedAt : null;
          } catch {
            cachedAt = null;
          }
        }
        expiringSoon.push({
          key: formattedKey,
          mediaTitle: titleId ? titleMap.get(titleId) ?? null : null,
          cachedAt,
          ttlSeconds: ttlValue,
        });
      }
    }

    expiringSoon.sort((a, b) => {
      const aCachedAt = a.cachedAt ?? 0;
      const bCachedAt = b.cachedAt ?? 0;
      if (aCachedAt !== bCachedAt) return bCachedAt - aCachedAt;
      return b.ttlSeconds - a.ttlSeconds;
    });

    const jobCounts = {
      pending: 0,
      processing: 0,
      completed: 0,
      failed: 0,
    };

    for (const row of groupedJobs) {
      if (row.status in jobCounts) {
        jobCounts[row.status as keyof typeof jobCounts] = row._count.status;
      }
    }

    const contentByType = {
      movie: 0,
      tvshow: 0,
    };

    for (const row of groupedTypes) {
      if (row.type in contentByType) {
        contentByType[row.type as keyof typeof contentByType] = row._count.type;
      }
    }

    const averageDurationMs =
      recentFinishedJobs.length === 0
        ? 0
        : Math.round(
            recentFinishedJobs.reduce((sum, job) => {
              const duration = job.updatedAt.getTime() - job.createdAt.getTime();
              return sum + Math.max(0, duration);
            }, 0) / recentFinishedJobs.length
          );

    const totalCacheLookups = metricsSnapshot.cacheHits + metricsSnapshot.cacheMisses;
    const cacheHitRate = totalCacheLookups
      ? Number((metricsSnapshot.cacheHits / totalCacheLookups).toFixed(4))
      : 0;
    const redisUsedBytes = parseRedisInfoValue(redisMemoryInfo, 'used_memory') ?? 0;
    const redisMaxBytesRaw = parseRedisInfoValue(redisMemoryInfo, 'maxmemory');
    const redisMaxBytes = redisMaxBytesRaw && redisMaxBytesRaw > 0 ? redisMaxBytesRaw : null;

    return {
      generatedAt: now.toISOString(),
      environment: env.NODE_ENV,
      cache: {
        configuredTtlSeconds: env.REDIS_TTL_SECONDS,
        keyCount: cacheKeys.length,
        analyzedKeyLimit: MAX_CACHE_KEYS_FOR_ANALYSIS,
        sampled: cacheSampled,
        totalBytes,
        redisMemory: {
          usedBytes: redisUsedBytes,
          maxBytes: redisMaxBytes,
        },
        ttlDistribution,
        expiringSoon: expiringSoon.slice(0, 10),
      },
      content: {
        total: totalContent,
        byType: contentByType,
        addedLast24h: recent24h,
        addedLast7d: recent7d,
        metadataGaps: {
          missingPlot,
          missingAgeRating,
          missingBackdrop,
        },
        topGenres: topGenreLinks.map((item) => ({
          name: genreMap.get(item.genreId) ?? 'Unknown',
          count: item._count.genreId,
        })),
      },
      jobs: {
        counts: jobCounts,
        averageDurationMs,
        failedRecent: recentFailedJobs.map((job) => ({
          id: job.id,
          url: job.url,
          error: job.error ?? 'Unknown error',
          updatedAt: job.updatedAt.toISOString(),
        })),
      },
      requestMetrics: {
        cacheHits: metricsSnapshot.cacheHits,
        cacheMisses: metricsSnapshot.cacheMisses,
        cacheHitRate,
        sourceCounts: metricsSnapshot.bySource,
      },
    };
  }

  static async clearCache(): Promise<AdminActionResponse> {
    const { keys } = await collectCacheKeys();
    if (keys.length === 0) {
      return {
        queued: 0,
        skipped: 0,
        details: 'No cache keys matched prefix',
      };
    }

    await redis.del(...keys);
    return {
      queued: keys.length,
      skipped: 0,
      details: 'Cache keys deleted',
    };
  }

  static async warmupCacheFromDatabase(): Promise<AdminActionResponse> {
    const allContent = await prisma.content.findMany({
      include: {
        genres: {
          include: {
            genre: true,
          },
        },
        castMembers: {
          orderBy: { name: 'asc' },
        },
      },
      orderBy: { createdAt: 'desc' },
    });

    let queued = 0;
    for (const item of allContent) {
      const apiPayload = ContentService.toApiResponse({
        id: item.id,
        url: item.url,
        title: item.title,
        year: item.year,
        plot: item.plot,
        backdropUrl: item.backdropUrl,
        ageRating: item.ageRating,
        type: item.type as 'movie' | 'tvshow',
        currentSeason: item.currentSeason,
        genres: item.genres.map((g) => g.genre.name),
        cast: item.castMembers.map((c) => c.name),
        createdAt: item.createdAt,
        updatedAt: item.updatedAt,
      });

      await CacheService.set(item.url, apiPayload);
      queued += 1;
    }

    return {
      queued,
      skipped: 0,
      details: 'Database content written to Redis cache',
    };
  }

  static async retryFailedJobs(limit: number): Promise<AdminActionResponse> {
    const failedJobs = await prisma.scrapeJob.findMany({
      where: { status: 'failed' },
      orderBy: { updatedAt: 'desc' },
      take: limit,
      select: { url: true },
    });

    let queued = 0;
    let skipped = 0;
    const uniqueUrls = Array.from(new Set(failedJobs.map((job) => job.url)));

    for (const url of uniqueUrls) {
      const activeJob = await prisma.scrapeJob.findFirst({
        where: { url, status: { in: ['pending', 'processing'] } },
        select: { id: true },
      });

      if (activeJob) {
        skipped += 1;
        continue;
      }

      const job = await JobService.create(url);
      JobService.process(job.id).catch(() => {
        // async retry failures are reflected in job status
      });
      queued += 1;
    }

    return {
      queued,
      skipped,
      details: 'Failed jobs retried',
    };
  }

  static async refreshStaleContent(days: number, limit: number): Promise<AdminActionResponse> {
    const threshold = new Date(Date.now() - days * 24 * 60 * 60 * 1000);
    const staleContent = await prisma.content.findMany({
      where: { updatedAt: { lt: threshold } },
      orderBy: { updatedAt: 'asc' },
      take: limit,
      select: { url: true },
    });

    let queued = 0;
    let skipped = 0;

    for (const item of staleContent) {
      const activeJob = await prisma.scrapeJob.findFirst({
        where: { url: item.url, status: { in: ['pending', 'processing'] } },
        select: { id: true },
      });

      if (activeJob) {
        skipped += 1;
        continue;
      }

      const job = await JobService.create(item.url);
      JobService.process(job.id).catch(() => {
        // async refresh failures are reflected in job status
      });
      queued += 1;
    }

    return {
      queued,
      skipped,
      details: `Stale content refresh queued for items older than ${days} days`,
    };
  }
}

export default AdminService;