first commit

This commit is contained in:
2026-02-28 02:44:41 +03:00
commit 97fb289fe7
70 changed files with 11928 additions and 0 deletions

52
.dockerignore Normal file
View File

@@ -0,0 +1,52 @@
# Dependencies
node_modules/
npm-debug.log*
yarn-debug.log*
yarn-error.log*
# Build output
dist/
build/
# Environment files
.env
.env.local
.env.*.local
# IDE
.vscode/
.idea/
*.swp
*.swo
*~
# OS
.DS_Store
Thumbs.db
# Git
.git/
.gitignore
# Docker
Dockerfile*
docker-compose*.yml
.dockerignore
# Documentation
doc/
*.md
!README.md
# Prisma
prisma/migrations/*_*
# Test
coverage/
.nyc_output/
# Misc
*.log
*.pid
*.seed
*.pid.lock

41
.env.example Normal file
View File

@@ -0,0 +1,41 @@
# ===========================================
# Netflix Scraper API - Environment Variables
# Copy this file to .env and fill in the values
# ===========================================
# === Server Configuration ===
PORT=3000
NODE_ENV=development
# === PostgreSQL Configuration ===
POSTGRES_HOST=localhost
POSTGRES_PORT=5432
POSTGRES_USER=postgres
POSTGRES_PASSWORD=your-secure-password-here
POSTGRES_DB=netflix_scraper
# === Redis Configuration ===
REDIS_HOST=localhost
REDIS_PORT=6379
# Cache TTL in seconds (default: 7 days = 604800)
REDIS_TTL_SECONDS=604800
# === Rate Limiting Configuration ===
# Time window in milliseconds (default: 1 minute)
RATE_LIMIT_WINDOW_MS=60000
# Maximum requests per window per IP
RATE_LIMIT_MAX_REQUESTS=30
# === API Keys (for frontend authentication) ===
# Generate secure random keys for production!
API_KEY_WEB=web-frontend-key-change-me-in-production
API_KEY_MOBILE=mobile-app-key-change-me-in-production
API_KEY_ADMIN=admin-key-super-secret-change-me
# === TMDB API Configuration ===
# Get your API key from https://www.themoviedb.org/settings/api
TMDB_API_KEY=your-tmdb-api-key-here
TMDB_ACCESS_TOKEN=your-tmdb-access-token-here
# === Optional: Logging ===
# LOG_LEVEL=info # debug, info, warn, error

37
.gitignore vendored Normal file
View File

@@ -0,0 +1,37 @@
# Dependencies
node_modules/
# Build output
dist/
# Environment files
.env
.env.local
.env.*.local
# IDE
.vscode/
.idea/
*.swp
*.swo
*~
# OS
.DS_Store
Thumbs.db
# Logs
logs/
*.log
npm-debug.log*
yarn-debug.log*
yarn-error.log*
# Test coverage
coverage/
.nyc_output/
# Misc
*.pid
*.seed
*.pid.lock

86
Dockerfile Normal file
View File

@@ -0,0 +1,86 @@
# ===========================================
# Stage 1: Dependencies
# ===========================================
FROM node:20-alpine AS deps
WORKDIR /app
# Install libc6-compat and openssl for Prisma
RUN apk add --no-cache libc6-compat openssl
# Set dummy DATABASE_URL for Prisma generate (doesn't need real connection)
ENV DATABASE_URL="postgresql://dummy:dummy@dummy:5432/dummy"
# Copy package files
COPY package.json package-lock.json* ./
COPY prisma ./prisma/
# Install dependencies
RUN npm ci --include=dev
# Generate Prisma client
RUN npx prisma generate
# ===========================================
# Stage 2: Builder
# ===========================================
FROM node:20-alpine AS builder
WORKDIR /app
# Copy dependencies from deps stage
COPY --from=deps /app/node_modules ./node_modules
COPY --from=deps /app/prisma ./prisma
# Copy source files
COPY . .
# Build TypeScript
RUN npm run build
# Prune dev dependencies
RUN npm prune --production
# ===========================================
# Stage 3: Production Runner
# ===========================================
FROM node:20-alpine AS runner
WORKDIR /app
# Install libc6-compat and openssl for Prisma (busybox includes netcat)
RUN apk add --no-cache libc6-compat openssl
# Create non-root user for security
RUN addgroup --system --gid 1001 nodejs
RUN adduser --system --uid 1001 appuser
# Set environment
ENV NODE_ENV=production
ENV PORT=3000
# Copy built files and dependencies
COPY --from=builder /app/dist ./dist
COPY --from=builder /app/node_modules ./node_modules
COPY --from=builder /app/prisma ./prisma
COPY --from=builder /app/package.json ./
COPY scripts/startup.sh ./scripts/startup.sh
# Set ownership
RUN chown -R appuser:nodejs /app
# Make startup script executable
RUN chmod +x ./scripts/startup.sh
# Switch to non-root user
USER appuser
# Expose port
EXPOSE 3000
# Health check
HEALTHCHECK --interval=30s --timeout=10s --start-period=30s --retries=3 \
CMD wget --no-verbose --tries=1 --spider http://localhost:3000/health || exit 1
# Start application with startup script
CMD ["sh", "./scripts/startup.sh"]

36
Dockerfile.dev Normal file
View File

@@ -0,0 +1,36 @@
# ===========================================
# Development Dockerfile with Hot Reload
# ===========================================
FROM node:20-alpine
WORKDIR /app
# Install dependencies for development (Prisma needs libc6-compat and openssl)
RUN apk add --no-cache libc6-compat openssl
# Set environment
ENV NODE_ENV=development
ENV PORT=3000
# Set dummy DATABASE_URL for Prisma generate (doesn't need real connection)
ENV DATABASE_URL="postgresql://dummy:dummy@dummy:5432/dummy"
# Copy package files
COPY package.json package-lock.json* ./
COPY prisma ./prisma/
# Install all dependencies (including dev)
RUN npm install
# Generate Prisma client
RUN npx prisma generate
# Copy startup script
COPY scripts/dev-startup.sh ./scripts/dev-startup.sh
RUN chmod +x ./scripts/dev-startup.sh
# Expose port
EXPOSE 3000
# Start with dev startup script (includes migrations)
CMD ["sh", "./scripts/dev-startup.sh"]

171
README.md Normal file
View File

@@ -0,0 +1,171 @@
# Netflix Scraper API
Netflix içerik sayfalarından film/dizi bilgilerini çeken yüksek performanslı bir backend API servisi.
## Özellikler
- **Scraping**: Netflix URL'lerinden otomatik içerik bilgisi çekme
- **Cache**: Redis ile 7 günlük önbellek (yapılandırılabilir)
- **Kalıcılık**: PostgreSQL ile verilerin kalıcı saklanması
- **Real-time**: Socket.IO ile canlı ilerleme bildirimleri
- **Güvenlik**: API Key authentication ve rate limiting
- **Docker**: Tek komut ile ayağa kalkma
## Hızlı Başlangıç
```bash
# .env dosyasını oluştur
cp .env.example .env
# API key'leri düzenle
nano .env
# Başlat (tek komut!)
docker compose -f docker-compose.dev.yml up --build
```
API şu adreste çalışacak: `http://localhost:3000`
## API Kullanımı
### İstek Örneği
```bash
curl -X POST http://localhost:3000/api/getinfo \
-H "Content-Type: application/json" \
  -H "X-API-Key: web-frontend-key-change-me-in-production" \
-d '{"url": "https://www.netflix.com/tr/title/81616256"}'
```
### Yanıt Örneği
```json
{
"success": true,
"data": {
"title": "Hayata Röveşata Çeken Adam",
"year": 2022,
"plot": "Dünyaya karşı duyduğu öfke...",
"genres": ["18+", "Komedi"],
"cast": ["Tom Hanks", "Mariana Treviño", "Rachel Keller"],
"backdrop": "https://occ-0-7335-..."
}
}
```
## Endpoints
| Method | Endpoint | Açıklama |
|--------|----------|----------|
| `GET` | `/health` | Sağlık kontrolü |
| `GET` | `/ready` | Bağımlılık kontrolü |
| `POST` | `/api/getinfo` | İçerik bilgisi getir |
| `POST` | `/api/getinfo/async` | Asenkron job oluştur |
| `GET` | `/api/jobs/:jobId` | Job durumu sorgula |
## Socket.IO Events
| Event | Yön | Açıklama |
|-------|-----|----------|
| `job:subscribe` | Client → Server | Job'a abone ol |
| `job:progress` | Server → Client | İlerleme güncellemesi |
| `job:completed` | Server → Client | İşlem tamamlandı |
| `job:error` | Server → Client | Hata oluştu |
## Environment Değişkenleri
| Değişken | Açıklama | Varsayılan |
|----------|----------|------------|
| `PORT` | Sunucu portu | `3000` |
| `NODE_ENV` | Ortam | `development` |
| `POSTGRES_*` | PostgreSQL ayarları | - |
| `REDIS_*` | Redis ayarları | - |
| `REDIS_TTL_SECONDS` | Cache süresi | `604800` (7 gün) |
| `RATE_LIMIT_*` | Rate limit ayarları | - |
| `API_KEY_*` | API anahtarları | - |
## Migration
Migration'lar otomatik olarak container başlatılırken çalışır.
```bash
# Manuel migration
docker compose exec app npx prisma migrate deploy
```
## Teknoloji Yığını
| Katman | Teknoloji |
|--------|-----------|
| Runtime | Node.js 20+ |
| Framework | Express.js |
| Database | PostgreSQL 16 |
| Cache | Redis 7 |
| Real-time | Socket.IO 4 |
| Scraper | Cheerio |
| ORM | Prisma |
## Proje Yapısı
```
.
├── src/
│ ├── config/ # Yapılandırma (env, database, redis, socket)
│ ├── middleware/ # Express middleware (auth, rate-limit, validation)
│ ├── routes/ # API rotaları
│ ├── services/ # İş mantığı (scraper, cache, content, job)
│ ├── types/ # TypeScript tipleri
│ └── utils/ # Yardımcı fonksiyonlar
├── prisma/
│ ├── schema.prisma # Veritabanı şeması
│ └── seed.ts # Başlangıç verileri
├── doc/
│ ├── overview.md # Proje özeti
│ ├── api.md # API dokümantasyonu
│ ├── ops.md # Operasyon notları
│ └── socket-events.md # Socket.IO events
├── docker-compose.dev.yml
├── docker-compose.yml
├── Dockerfile
└── package.json
```
## Dokümantasyon
- **Proje Özeti**: [`doc/overview.md`](doc/overview.md)
- **API Dokümantasyonu**: [`doc/api.md`](doc/api.md)
- **Operasyon**: [`doc/ops.md`](doc/ops.md)
- **Socket Events**: [`doc/socket-events.md`](doc/socket-events.md)
## Geliştirme
### Local Development
```bash
# Bağımlılıkları yükle
npm install
# Prisma client oluştur
npx prisma generate
# Development modda çalıştır
npm run dev
```
### Production Build
```bash
npm run build
npm start
```
## Güvenlik
- **API Key Authentication**: Tüm istekler API key gerektirir
- **Rate Limiting**: Dakikada max 30 istek (yapılandırılabilir)
- **Non-root Container**: Production container'ları root olmayan kullanıcı ile çalışır
- **Input Validation**: Tüm girdiler Zod ile doğrulanır
## Lisans
MIT

133
SKILL.MD Normal file
View File

@@ -0,0 +1,133 @@
# AI Project Skills (MUST APPLY)
Bu doküman, projeyi geliştirirken AI agentın (Claude) UYGULAMAK ZORUNDA olduğu kuralları tanımlar.
Buradaki her madde “MUST” seviyesindedir. İstisna yoktur.
Bir değişiklik bu kuralları etkiliyorsa aynı değişiklikte dokümanlar da güncellenmelidir.
---
## 1) Docker & Compose Standartları (MUST)
- Projede Docker altyapısı kullanılacak.
- İki compose dosyası olacak:
- `docker-compose.dev.yml` (dev)
- `docker-compose.yml` (prod)
- Dev ortamında yapılan değişiklikler canlı yansıyacak (hot reload / watch).
- Proje DEV ortamında **tek komut** ile ayağa kalkmalıdır:
- `docker compose -f docker-compose.dev.yml up --build`
- Bu komut dışında:
- manuel migrate/seed/kurulum komutu çalıştırmak
- ekstra adım istemek
- “önce şunu yap sonra bunu yap” tarzı süreçler
**KABUL EDİLMEZ.**
- Servisler için healthcheck tanımlanacak (db/redis/app) ve bağımlılıklar healthcheck üzerinden yönetilecek.
- Docker imageları mümkünse multi-stage build ile üretilecek; prod image minimal olacak; container non-root user ile çalışacak.
---
## 2) Ortam Değişkenleri (MUST)
- Ortam değişkenleri `.env` üzerinden tanımlanacak.
- Repo içinde `.env` bulunmayacak (gitignore).
- `.env.example` dosyası oluşturulacak.
- `.env`'yi etkileyen her güncellemede `.env.example` da aynı PR/commit içinde güncellenecek.
- Compose dosyalarında:
- `.env` dosyası okunacak ve değişkenler servislere aktarılacak.
- `.env` içinde olmayan bir değişken kullanılıyorsa compose içinde DEFAULT değer verilecek (örn. `${VAR:-default}`).
- Uygulama ayağa kalkarken env doğrulaması yapılacak:
- Eksik değişken varsa fail-fast (startup'ta hata).
- Tip doğrulaması yapılacak (string/number/boolean).
- Öneri: `zod` / `envalid` benzeri bir yaklaşım.
---
## 3) Backend Teknoloji Yığını (Koşullu MUST)
Backend kullanılacaksa aşağıdakiler ZORUNLUDUR:
- Node.js
- Socket.IO
- Redis
- PostgreSQL
Backend için ek zorunluluklar:
- `/health` ve `/ready` endpointleri olacak.
- Structured logging (JSON) kullanılacak, log seviyeleri (debug/info/warn/error) standardize edilecek.
- Hata yanıt formatı tutarlı olacak (error code + message + details).
---
## 4) PostgreSQL Şema Yönetimi (MUST) — Tek Komut Kuralı ile Uyumlu
- PostgreSQL kullanılıyorsa şema yönetimi **deterministic** olmalıdır:
- Temiz kurulumda ve güncellemelerde aynı sonuca güvenilir şekilde ulaşmalıdır.
- Migration yaklaşımı kullanılacaksa:
- Migrationlar **backend container startup adımının parçası** olarak otomatik çalıştırılacaktır.
- Kullanıcıdan manuel olarak `migrate/seed` komutu çalıştırması istenmeyecektir.
- Backend, DB hazır olana kadar bekleyecek (healthcheck + retry/backoff) ve ardından:
1) migrationı çalıştıracak
2) uygulamayı başlatacaktır
- DB migration aracı seçilebilir (Prisma/Knex/TypeORM/Flyway vb.) ancak seçilen araç:
- READMEde ve `/doc/ops.md` içinde açıkça belirtilecek
- dev/prod için aynı “tek komut” felsefesini bozmayacak şekilde entegre edilecektir.
- Opsiyonel seed gerekiyorsa (ör. admin user vb.):
- seed işlemi de otomatik olacak
- yine manuel komut gerektirmeyecek.
---
## 5) Frontend Teknoloji Yığını (Koşullu MUST)
Frontend kullanılacaksa aşağıdakiler ZORUNLUDUR:
- React
- React Router
- Font Awesome
Frontend için ek zorunluluklar:
- Socket ile gelen canlı veriler ve state yönetimi READMEde ve `/doc/socket-events.md` içinde açıklanacak.
- Uygulama konfigürasyonu (API base url, socket url vb.) `.env` üzerinden yönetilecek.
---
## 6) Dokümantasyon Zorunlulukları (MUST)
- Projedeki tüm dökümanlar `/doc` dizini altında olacak.
- Proje için mutlaka bir “özet dokümanı” hazırlanacak:
- `/doc/overview.md`
- İçerik: Projenin ne yaptığı, neden oluşturulduğu, nasıl ilerlediği, önemli kararlar, güncellemeler.
- Projeye yapılan her güncellemede bu dosya gerektiği kadar güncellenecek.
- API ve Socket sözleşmeleri dokümante edilecek:
- REST varsa: `/doc/api.md` (OpenAPI/Swagger referansı dahil)
- Socket varsa: `/doc/socket-events.md` (event adı, yön, payload şeması, örnek)
- Operasyon / çalıştırma notları: `/doc/ops.md` (build, run, dev/prod, troubleshooting).
- Mimari kararlar için `/doc/decisions/ADR-XXXX.md` formatı kullanılacak (kısa ve net).
---
## 7) README Standartları (MUST)
- README her zaman güncel kalacak.
- Projeye ait frontend ve backend bilgileri detaylı anlatılacak:
- Kurulum
- Dev/Prod çalıştırma
- Env değişkenleri
- Endpointlerin ne iş yaptığı
- Socket ile iletilen canlı veriler (event listesi)
- Migration/seed yaklaşımı (varsa) ve “tek komut” akışının açıklaması
- README içinde anlatım Font Awesome ikonları ile zenginleştirilecek.
- README, `/doc` içindeki ana dokümanlara link verecek.
---
## 8) Değişiklik Kuralı (MUST)
- Bir değişiklik:
- `.env` / `.env.example`'ı etkiliyorsa ikisi birlikte güncellenecek.
- API/Socket sözleşmesini etkiliyorsa `/doc/api.md` veya `/doc/socket-events.md` güncellenecek.
- Çalıştırma şeklini (docker/komut/akış) etkiliyorsa README ve `/doc/ops.md` güncellenecek.
- DB şemasını etkiliyorsa migration/init/ops dokümanları aynı değişiklikte güncellenecek.
- Bu güncellemeler yapılmadan değişiklik “tamamlandı” sayılmaz.
---
## 9) İletişim Kuralı (MUST)
- Proje oluşturulurken ve geliştirilirken tüm iletişim Türkçe yapılacak.

256
doc/api.md Normal file
View File

@@ -0,0 +1,256 @@
# Netflix Scraper API - API Dokümantasyonu
## Base URL
```
Development: http://localhost:3000
Production: https://api.yourdomain.com
```
## Authentication
Tüm API istekleri `X-API-Key` header'ı gerektirir.
```http
X-API-Key: your-api-key-here
```
### API Key Tipleri
| Key | Kullanım |
|-----|----------|
| `API_KEY_WEB` | Web frontend |
| `API_KEY_MOBILE` | Mobil uygulama |
| `API_KEY_ADMIN` | Admin panel |
---
## Endpoints
### POST /api/getinfo
Netflix URL'sinden içerik bilgisi getirir.
**Request**
```http
POST /api/getinfo
Content-Type: application/json
X-API-Key: your-api-key
{
"url": "https://www.netflix.com/tr/title/81616256"
}
```
**Response (Success)**
```json
{
"success": true,
"data": {
"title": "Hayata Röveşata Çeken Adam",
"year": 2022,
"plot": "Dünyaya karşı duyduğu öfke ve yaşadığı kederin katılaştırdığı huysuz bir emekli, yaşamına son vermeyi planlar. Ancak hayatına neşeli bir genç aile girince tüm planları suya düşer.",
"genres": ["18+", "Komedi"],
"cast": ["Tom Hanks", "Mariana Treviño", "Rachel Keller"],
"backdrop": "https://occ-0-7335-3467.1.nflxso.net/dnm/api/v6/..."
}
}
```
**Response (Error)**
```json
{
"success": false,
"error": {
"code": "VALIDATION_ERROR",
"message": "Invalid request parameters",
"details": {
"errors": [
{
"field": "url",
"message": "URL must be a valid Netflix title URL"
}
]
}
}
}
```
**Status Codes**
| Code | Açıklama |
|------|----------|
| 200 | Başarılı |
| 400 | Geçersiz istek |
| 401 | API key eksik |
| 403 | Geçersiz API key |
| 429 | Rate limit aşıldı |
| 500 | Sunucu hatası |
---
### POST /api/getinfo/async
Asenkron scraping job'u oluşturur. Büyük ölçekli kullanım için uygundur.
**Request**
```http
POST /api/getinfo/async
Content-Type: application/json
X-API-Key: your-api-key
{
"url": "https://www.netflix.com/tr/title/81616256"
}
```
**Response**
```json
{
"success": true,
"data": {
"jobId": "550e8400-e29b-41d4-a716-446655440000",
"status": "pending"
}
}
```
**Socket ile İzleme**
Job durumunu Socket.IO ile izleyebilirsiniz:
```javascript
socket.emit('job:subscribe', jobId);
socket.on('job:progress', (data) => console.log(data));
socket.on('job:completed', (data) => console.log(data));
socket.on('job:error', (data) => console.error(data));
```
---
### GET /api/jobs/:jobId
Job durumunu sorgular.
**Request**
```http
GET /api/jobs/550e8400-e29b-41d4-a716-446655440000
X-API-Key: your-api-key
```
**Response**
```json
{
"success": true,
"data": {
"id": "550e8400-e29b-41d4-a716-446655440000",
"url": "https://www.netflix.com/tr/title/81616256",
"status": "completed",
"progress": 100,
"step": "completed",
"result": {
"title": "Hayata Röveşata Çeken Adam",
"year": 2022,
"plot": "...",
"genres": ["18+", "Komedi"],
"cast": ["Tom Hanks", "Mariana Treviño", "Rachel Keller"],
"backdrop": "https://..."
},
"createdAt": "2025-02-27T10:00:00.000Z",
"updatedAt": "2025-02-27T10:00:05.000Z"
}
}
```
---
### GET /health
Basit sağlık kontrolü.
**Response**
```json
{
"status": "ok",
"timestamp": "2025-02-27T10:00:00.000Z",
"uptime": 3600
}
```
---
### GET /ready
Tüm bağımlılıkların hazır olup olmadığını kontrol eder.
**Response**
```json
{
"status": "ready",
"timestamp": "2025-02-27T10:00:00.000Z",
"checks": {
"database": "healthy",
"redis": "healthy"
},
"env": "production"
}
```
---
## Error Codes
| Code | Açıklama |
|------|----------|
| `MISSING_API_KEY` | API key header'ı eksik |
| `INVALID_API_KEY` | Geçersiz API key |
| `VALIDATION_ERROR` | İstek parametreleri geçersiz |
| `RATE_LIMIT_EXCEEDED` | Genel rate limit aşıldı |
| `SCRAPE_RATE_LIMIT_EXCEEDED` | Scraping rate limit aşıldı |
| `SCRAPE_ERROR` | Netflix'ten veri çekilemedi |
| `JOB_NOT_FOUND` | Job bulunamadı |
| `INTERNAL_ERROR` | Beklenmeyen sunucu hatası |
---
## Rate Limiting
### Genel Rate Limit
- **Window**: 1 dakika
- **Max İstek**: 30 istek/dakika/IP+APIKey
### Scraping Rate Limit
- **Window**: 1 dakika
- **Max İstek**: 10 istek/dakika/IP+APIKey
Rate limit değerleri `.env` dosyasından yapılandırılabilir.
---
## Request/Response Formatları
### Content Data (GetInfoResponse)
| Alan | Tip | Açıklama |
|------|-----|----------|
| `title` | string | İçerik başlığı |
| `year` | number \| null | Yayın yılı |
| `plot` | string \| null | Açıklama/özet |
| `genres` | string[] | Tür listesi |
| `cast` | string[] | Oyuncu listesi |
| `backdrop` | string \| null | Arka plan görseli URL |
---
## OpenAPI / Swagger
OpenAPI spesifikasyonu için: `/api/docs` (yakında)

View File

@@ -0,0 +1,53 @@
# ADR-001: Cheerio Scraping Kütüphanesi Seçimi
## Durum
Kabul edildi
## Bağlam
Netflix içerik sayfalarından HTML parsing ile veri çekmemiz gerekiyor. İki ana seçenek var:
1. **Cheerio**: Lightweight HTML parser
2. **Playwright/Puppeteer**: Headless browser automation
## Karar
**Cheerio** seçildi.
## Gerekçe
### Cheerio Avantajları
- Hafif ve hızlı
- Düşük kaynak kullanımı
- Basit API
- Daha az bağımlılık
### Playwright Avantajları
- JavaScript rendering desteği
- Daha güçlü scraping
- Dinamik içerik desteği
### Seçim Nedeni
1. Netflix sayfalarının HTML'inde temel veriler mevcut
2. Client-side rendering gerektiren kritik veri yok
3. Performans öncelikli
4. Başlangıç için Cheerio yeterli
## Sonuçlar
### Olumlu
- Düşük kaynak kullanımı
- Hızlı yanıt süresi
- Basit bakım
### Olumsuz
- JavaScript rendering gerektiren sayfalar için çalışmayabilir
- Netflix client-side rendering'e geçerse güncelleme gerekir
### Alternatif Plan
Eğer Cheerio yetersiz kalırsa Playwright'a geçiş yapılabilir. Altyapı buna uygun hazır.
## Tarih
2025-02-27

View File

@@ -0,0 +1,61 @@
# ADR-002: Hybrid Cache Stratejisi
## Durum
Kabul edildi
## Bağlam
Netflix'ten scraped verileri nasıl saklayacağımıza karar vermemiz gerekiyor. Seçenekler:
1. **Sadece Cache**: Redis'te TTL ile sakla
2. **Sadece Database**: PostgreSQL'de kalıcı sakla
3. **Hybrid**: Cache + Database birlikte
## Karar
**Hybrid Cache Stratejisi** (Cache → DB → Netflix) seçildi.
## Akış
```
İstek → Redis Cache → Varsa dön
→ Yoksa → PostgreSQL → Varsa dön, Cache'e yaz
→ Yoksa → Netflix'ten çek → DB'ye yaz → Cache'e yaz → Dön
```
## Gerekçe
### Sadece Cache Eksikleri
- TTL dolduğunda veri kaybı
- Yeniden scraping maliyeti
### Sadece Database Eksikleri
- Her istekte DB sorgusu
- Daha yavaş yanıt
### Hybrid Avantajları
1. **Hız**: Cache hit durumunda anlık yanıt
2. **Kalıcılık**: Veriler DB'de saklanır
3. **Verimlilik**: Netflix'e gereksiz istek atılmaz
4. **TTL Esnekliği**: Cache süresi ayarlanabilir
## Sonuçlar
### Olumlu
- En hızlı yanıt süresi (cache hit)
- Veri kalıcılığı
- Netflix üzerinde minimal yük
### Olumsuz
- İki sistem senkronizasyonu
- Daha fazla kod karmaşıklığı
### Yapılandırma
```env
REDIS_TTL_SECONDS=604800 # 7 gün
```
## Tarih
2025-02-27

View File

@@ -0,0 +1,63 @@
# ADR-003: Named API Keys Stratejisi
## Durum
Kabul edildi
## Bağlam
API güvenliği için authentication yöntemi belirlememiz gerekiyor. Birden fazla frontend olacak (Web, Mobile, Admin).
## Seçenekler
1. **Tek API Key**: Tüm frontend'ler aynı key'i kullanır
2. **Named API Keys**: Her frontend için ayrı key
3. **Database API Keys**: Key'ler DB'de tutulur, yönetim paneli ile
## Karar
**Named API Keys** (.env'de tanımlı) seçildi.
## Yapılandırma
```env
API_KEY_WEB=web-frontend-key-xxx
API_KEY_MOBILE=mobile-app-key-yyy
API_KEY_ADMIN=admin-key-zzz
```
## Gerekçe
### Tek Key Eksikleri
- Hangi frontend'in istek attığı belli değil
- Tek bir frontend engellenemez
- Audit trail yok
### Database Key Eksikleri
- Ekstra kompleksite
- Yönetim paneli gerektirir
- Başlangıç için overkill
### Named Keys Avantajları
1. **İzlenebilirlik**: Hangi frontend'in istek attığı bilinir
2. **Kontrol**: Tek bir frontend'in erişimi kapatılabilir
3. **Basitlik**: .env ile yönetim
4. **Güvenlik**: Her frontend için ayrı secret
## Sonuçlar
### Olumlu
- Frontend bazlı rate limiting
- Kolay key rotasyonu
- Basit yapılandırma
### Olumsuz
- Yeni frontend için .env güncellemesi gerekir
- Key yönetimi manuel
### Gelecek
Gerekirse Database API Keys sistemine geçiş yapılabilir.
## Tarih
2025-02-27

291
doc/ops.md Normal file
View File

@@ -0,0 +1,291 @@
# Netflix Scraper API - Operasyon Dokümantasyonu
## Hızlı Başlangıç
### Gereksinimler
- Docker & Docker Compose
- Node.js 20+ (local development için)
### Tek Komut ile Başlatma (Development)
```bash
# .env dosyasını oluştur
cp .env.example .env
# .env dosyasını düzenle
nano .env
# Başlat
docker compose -f docker-compose.dev.yml up --build
```
**Hepsi bu kadar!** Uygulama şu adreste çalışacak: `http://localhost:3000`
---
## Ortamlar
### Development
```bash
docker compose -f docker-compose.dev.yml up --build
```
Özellikler:
- Hot reload aktif
- Tüm loglar görünür
- Debug modu
### Production
```bash
docker compose up --build -d
```
Özellikler:
- Multi-stage build
- Non-root user
- Minimal image
- Production optimizations
---
## Environment Değişkenleri
### .env Dosyası
```env
# === Server ===
PORT=3000
NODE_ENV=development
# === PostgreSQL ===
POSTGRES_HOST=postgres
POSTGRES_PORT=5432
POSTGRES_USER=postgres
POSTGRES_PASSWORD=your-secure-password
POSTGRES_DB=netflix_scraper
# === Redis ===
REDIS_HOST=redis
REDIS_PORT=6379
REDIS_TTL_SECONDS=604800
# === Rate Limiting ===
RATE_LIMIT_WINDOW_MS=60000
RATE_LIMIT_MAX_REQUESTS=30
# === API Keys ===
API_KEY_WEB=web-key-change-me
API_KEY_MOBILE=mobile-key-change-me
API_KEY_ADMIN=admin-key-secret
```
### Değişken Açıklamaları
| Değişken | Açıklama | Varsayılan |
|----------|----------|------------|
| `PORT` | Sunucu portu | 3000 |
| `NODE_ENV` | Ortam (development/production) | development |
| `REDIS_TTL_SECONDS` | Cache süresi (saniye) | 604800 (7 gün) |
| `RATE_LIMIT_WINDOW_MS` | Rate limit penceresi (ms) | 60000 (1 dk) |
| `RATE_LIMIT_MAX_REQUESTS` | Max istek sayısı | 30 |
---
## Migration
Migration'lar otomatik olarak container başlatılırken çalışır. Manuel çalıştırmak için:
```bash
# Container içinde
docker compose exec app npx prisma migrate deploy
# Local
npx prisma migrate deploy
```
### Yeni Migration Oluşturma
```bash
npx prisma migrate dev --name description_of_change
```
---
## Loglama
Tüm loglar JSON formatında structured logging ile yazılır.
### Log Seviyeleri
| Seviye | Açıklama |
|--------|----------|
| `debug` | Detaylı debug bilgisi |
| `info` | Genel bilgi |
| `warn` | Uyarı |
| `error` | Hata |
### Log Formatı
```json
{
"timestamp": "2025-02-27T10:00:00.000Z",
"level": "info",
"message": "Server started",
"service": "netflix-scraper-api",
"port": 3000
}
```
### Log Seviyesi Ayarlama
```env
LOG_LEVEL=debug
```
---
## Health Check
### Endpoints
- `GET /health` - Basit sağlık kontrolü
- `GET /ready` - Bağımlılık kontrolü (DB + Redis)
### Docker Health Check
Container'lar otomatik health check ile izlenir:
```yaml
healthcheck:
test: ["CMD", "wget", "--spider", "http://localhost:3000/health"]
interval: 30s
timeout: 10s
retries: 3
```
---
## Troubleshooting
### Database Bağlantı Hatası
```
Error: Can't reach database server
```
**Çözüm:**
1. PostgreSQL container'ın çalıştığını kontrol et
2. `POSTGRES_HOST` değerini kontrol et (docker'da `postgres` olmalı)
3. Health check'i bekle: `docker compose logs postgres`
### Redis Bağlantı Hatası
```
Error: Redis connection failed
```
**Çözüm:**
1. Redis container'ın çalıştığını kontrol et
2. `REDIS_HOST` değerini kontrol et
### Migration Hatası
```
Error: P3005: The database schema is not empty
```
**Çözüm:**
```bash
# Veritabanını sıfırla
docker compose down -v
docker compose up --build
```
### Port Kullanımda Hatası
```
Error: port is already allocated
```
**Çözüm:**
```bash
# Hangi process kullanıyor
lsof -i :3000
# Process'i durdur
kill -9 <PID>
```
---
## Yararlı Komutlar
### Container Yönetimi
```bash
# Container'ları durdur
docker compose down
# Container'ları sil (volumes dahil)
docker compose down -v
# Logları görüntüle
docker compose logs -f app
# Container içine gir
docker compose exec app sh
```
### Database İşlemleri
```bash
# Prisma studio aç
docker compose exec app npx prisma studio
# Database schema görüntüle
docker compose exec app npx prisma db pull
# Seed çalıştır
docker compose exec app npm run prisma:seed
```
### Redis İşlemleri
```bash
# Redis CLI
docker compose exec redis redis-cli
# Tüm cache'i temizle
docker compose exec redis redis-cli FLUSHALL
```
---
## Backup & Restore
### PostgreSQL Backup
```bash
docker compose exec postgres pg_dump -U postgres netflix_scraper > backup.sql
```
### PostgreSQL Restore
```bash
cat backup.sql | docker compose exec -T postgres psql -U postgres netflix_scraper
```
---
## Monitoring (Opsiyonel)
### Metrics Endpoint (Yakında)
```
GET /metrics
```
Prometheus uyumlu metrics döndürür.

82
doc/overview.md Normal file
View File

@@ -0,0 +1,82 @@
# Netflix Scraper API - Proje Özeti
## Proje Hakkında
Netflix Scraper API, Netflix içerik sayfalarından film/dizi bilgilerini çeken bir backend servisidir. URL gönderilerek içerik bilgileri çekilir, önbelleğe alınır ve geri döndürülür.
## Neden Oluşturuldu?
- **Otomatik İçerik Toplama**: Netflix URL'lerinden otomatik olarak içerik bilgisi çekmek
- **Performans**: Redis cache ile tekrarlayan isteklerde hızlı yanıt
- **Kalıcılık**: PostgreSQL ile verilerin kalıcı olarak saklanması
- **Gerçek Zamanlı**: Socket.IO ile canlı ilerleme bildirimleri
## Teknoloji Yığını
| Katman | Teknoloji |
|--------|-----------|
| Runtime | Node.js 20+ |
| Framework | Express.js |
| Database | PostgreSQL 16 |
| Cache | Redis 7 |
| Real-time | Socket.IO |
| Scraper | Cheerio |
| ORM | Prisma |
## Mimari
```
┌─────────────────┐
│ Frontend │
│ (Web/Mobile) │
└────────┬────────┘
│ HTTP + API Key
┌─────────────────┐ ┌─────────────┐
│ Express API │────▶│ Redis │
│ (Port 3000) │ │ (Cache) │
└────────┬────────┘ └─────────────┘
┌─────────────────┐ ┌─────────────┐
│ Scraper Service │────▶│ PostgreSQL │
│ (Cheerio) │ │ (Data) │
└─────────────────┘ └─────────────┘
```
## Veri Akışı
1. **İstek Alınır**: `POST /api/getinfo` endpoint'ine URL gönderilir
2. **Cache Kontrolü**: Redis'te veri var mı diye bakılır
3. **DB Kontrolü**: PostgreSQL'de veri var mı diye bakılır
4. **Scraping**: Netflix'ten veri çekilir (cache/DB'de yoksa)
5. **Kaydetme**: Veri DB'ye ve cache'e yazılır
6. **Yanıt**: JSON formatında veri döndürülür
## Önemli Kararlar
### ADR-001: Cheerio Seçimi
Playwright yerine Cheerio seçildi. Başlangıç için yeterli, daha hafif ve hızlı. Gerekirse Playwright'a geçiş yapılabilir.
### ADR-002: Hybrid Cache Stratejisi
Cache → DB → Netflix sıralaması ile veri alınır. Bu sayede:
- En hızlı yanıt cache'ten gelir
- Cache'te yoksa DB'den gelir
- İlk istek hariç Netflix'e istek atılmaz
### ADR-003: Named API Keys
Her frontend için ayrı API key kullanılır. Bu sayede:
- Hangi frontend'in istek attığı takip edilebilir
- Gerekirse tek bir frontend'in erişimi kapatılabilir
## Güncellemeler
| Tarih | Güncelleme |
|-------|-----------|
| 2025-02-27 | Proje oluşturuldu |
## İlgili Dokümanlar
- [API Dokümantasyonu](./api.md)
- [Operasyon Notları](./ops.md)
- [Socket Events](./socket-events.md)

256
doc/socket-events.md Normal file
View File

@@ -0,0 +1,256 @@
# Netflix Scraper API - Socket.IO Events
## Bağlantı
### URL
```
http://localhost:3000
```
Not: Socket.IO istemcisine `http(s)://` origin verilir; WebSocket yükseltmesi kütüphane tarafından otomatik yapılır.
### Transport
- WebSocket
- Polling (fallback)
---
## Client → Server Events
### `job:subscribe`
Bir job'ın güncellemelerine abone ol.
**Payload:**
```typescript
jobId: string
```
**Örnek:**
```javascript
socket.emit('job:subscribe', '550e8400-e29b-41d4-a716-446655440000');
```
---
### `job:unsubscribe`
Job aboneliğini iptal et.
**Payload:**
```typescript
jobId: string
```
**Örnek:**
```javascript
socket.emit('job:unsubscribe', '550e8400-e29b-41d4-a716-446655440000');
```
---
## Server → Client Events
### `job:progress`
Job ilerleme durumu güncellendiğinde gönderilir.
**Payload:**
```typescript
{
jobId: string;
progress: number; // 0-100 arası
status: string; // "pending" | "processing" | "completed" | "failed"
step: string; // Mevcut adım açıklaması
}
```
**Örnek:**
```json
{
"jobId": "550e8400-e29b-41d4-a716-446655440000",
"progress": 50,
"status": "processing",
"step": "Scraping Netflix"
}
```
**Adımlar:**
| Step | Progress | Açıklama |
|------|----------|----------|
| `created` | 0 | Job oluşturuldu |
| `checking_cache` | 10 | Cache kontrol ediliyor |
| `checking_database` | 30 | Database kontrol ediliyor |
| `scraping_netflix` | 50 | Netflix'ten veri çekiliyor |
| `saving_to_database` | 80 | Veritabanına kaydediliyor |
| `completed` | 100 | İşlem tamamlandı |
---
### `job:completed`
Job başarıyla tamamlandığında gönderilir.
**Payload:**
```typescript
{
jobId: string;
data: GetInfoResponse;
source: "cache" | "database" | "netflix";
}
```
**Örnek:**
```json
{
"jobId": "550e8400-e29b-41d4-a716-446655440000",
"data": {
"title": "Hayata Röveşata Çeken Adam",
"year": 2022,
"plot": "...",
"genres": ["18+", "Komedi"],
"cast": ["Tom Hanks", "Mariana Treviño", "Rachel Keller"],
"backdrop": "https://..."
},
"source": "netflix"
}
```
---
### `job:error`
Job sırasında hata oluştuğunda gönderilir.
**Payload:**
```typescript
{
jobId: string;
error: {
code: string;
message: string;
}
}
```
**Örnek:**
```json
{
"jobId": "550e8400-e29b-41d4-a716-446655440000",
"error": {
"code": "SCRAPE_ERROR",
"message": "Failed to fetch Netflix page: 403"
}
}
```
---
## Kullanım Örneği
### JavaScript (Browser)
```javascript
import { io } from 'socket.io-client';
// Bağlan
const socket = io('http://localhost:3000');
// Bağlantı başarılı
socket.on('connect', () => {
console.log('Connected:', socket.id);
});
// Job oluştur (API üzerinden)
const response = await fetch('http://localhost:3000/api/getinfo/async', {
method: 'POST',
headers: {
'Content-Type': 'application/json',
'X-API-Key': 'your-api-key'
},
body: JSON.stringify({
url: 'https://www.netflix.com/tr/title/81616256'
})
});
const { data: { jobId } } = await response.json();
// Job'a abone ol
socket.emit('job:subscribe', jobId);
// İlerleme dinle
socket.on('job:progress', (data) => {
console.log(`Progress: ${data.progress}% - ${data.step}`);
});
// Tamamlanma dinle
socket.on('job:completed', (data) => {
console.log('Completed:', data.data);
socket.emit('job:unsubscribe', jobId);
});
// Hata dinle
socket.on('job:error', (data) => {
console.error('Error:', data.error);
});
```
### React Hook
```typescript
import { useEffect, useState } from 'react';
import { io, Socket } from 'socket.io-client';
interface JobProgress {
jobId: string;
progress: number;
status: string;
step: string;
}
export function useJobProgress(jobId: string | null) {
const [progress, setProgress] = useState<JobProgress | null>(null);
const [data, setData] = useState<any>(null);
const [error, setError] = useState<any>(null);
useEffect(() => {
if (!jobId) return;
const socket = io('http://localhost:3000');
socket.emit('job:subscribe', jobId);
socket.on('job:progress', setProgress);
socket.on('job:completed', (result) => {
setData(result.data);
socket.emit('job:unsubscribe', jobId);
});
socket.on('job:error', (err) => {
setError(err.error);
socket.emit('job:unsubscribe', jobId);
});
return () => {
socket.emit('job:unsubscribe', jobId);
socket.disconnect();
};
}, [jobId]);
return { progress, data, error };
}
```
---
## CORS Yapılandırması
Production'da CORS ayarlarını yapılandırın:
```typescript
// src/config/socket.ts
io = new Server(httpServer, {
cors: {
origin: ['https://yourdomain.com', 'https://admin.yourdomain.com'],
methods: ['GET', 'POST'],
credentials: true,
},
});
```

95
docker-compose.dev.yml Normal file
View File

@@ -0,0 +1,95 @@
# ===========================================
# Development Docker Compose
# Hot reload enabled, single command startup
# ===========================================

services:
  # --- API (tsx watch, hot reload via bind mounts) ---
  app:
    build:
      context: .
      dockerfile: Dockerfile.dev
    container_name: netflix-scraper-api-dev
    restart: unless-stopped
    ports:
      - "${PORT:-3000}:3000"
    environment:
      - NODE_ENV=development
      - PORT=3000
      # Postgres/Redis are addressed by their compose service names.
      - POSTGRES_HOST=postgres
      - POSTGRES_PORT=5432
      - POSTGRES_USER=${POSTGRES_USER:-postgres}
      - POSTGRES_PASSWORD=${POSTGRES_PASSWORD:-postgres}
      - POSTGRES_DB=${POSTGRES_DB:-netflix_scraper}
      - REDIS_HOST=redis
      - REDIS_PORT=6379
      - REDIS_TTL_SECONDS=${REDIS_TTL_SECONDS:-604800}
      - RATE_LIMIT_WINDOW_MS=${RATE_LIMIT_WINDOW_MS:-60000}
      - RATE_LIMIT_MAX_REQUESTS=${RATE_LIMIT_MAX_REQUESTS:-30}
      # Dev-only fallback keys; real values come from .env.
      - API_KEY_WEB=${API_KEY_WEB:-web-dev-key-change-me}
      - API_KEY_MOBILE=${API_KEY_MOBILE:-mobile-dev-key-change-me}
      - API_KEY_ADMIN=${API_KEY_ADMIN:-admin-dev-key-secret}
      - TMDB_API_KEY=${TMDB_API_KEY:-your-tmdb-api-key-here}
      - TMDB_ACCESS_TOKEN=${TMDB_ACCESS_TOKEN:-your-tmdb-access-token-here}
    volumes:
      # Source and prisma are bind-mounted for hot reload; node_modules
      # lives in a named volume so the host tree never shadows it.
      - ./src:/app/src:delegated
      - ./prisma:/app/prisma:delegated
      - ./package.json:/app/package.json:delegated
      - node_modules_data:/app/node_modules
    depends_on:
      postgres:
        condition: service_healthy
      redis:
        condition: service_healthy
    healthcheck:
      test: ["CMD", "wget", "--no-verbose", "--tries=1", "--spider", "http://localhost:3000/health"]
      interval: 30s
      timeout: 10s
      retries: 3
      start_period: 30s
    networks:
      - netflix-scraper-network

  # --- PostgreSQL (port published for host tooling in dev) ---
  postgres:
    image: postgres:16-alpine
    container_name: netflix-scraper-postgres-dev
    restart: unless-stopped
    ports:
      - "5432:5432"
    environment:
      - POSTGRES_USER=${POSTGRES_USER:-postgres}
      - POSTGRES_PASSWORD=${POSTGRES_PASSWORD:-postgres}
      - POSTGRES_DB=${POSTGRES_DB:-netflix_scraper}
    volumes:
      - postgres_data_dev:/var/lib/postgresql/data
    healthcheck:
      test: ["CMD-SHELL", "pg_isready -U ${POSTGRES_USER:-postgres} -d ${POSTGRES_DB:-netflix_scraper}"]
      interval: 10s
      timeout: 5s
      retries: 5
      start_period: 10s
    networks:
      - netflix-scraper-network

  # --- Redis (AOF persistence so cache survives restarts) ---
  redis:
    image: redis:7-alpine
    container_name: netflix-scraper-redis-dev
    restart: unless-stopped
    command: redis-server --appendonly yes
    volumes:
      - redis_data_dev:/data
    healthcheck:
      test: ["CMD", "redis-cli", "ping"]
      interval: 10s
      timeout: 5s
      retries: 5
      start_period: 5s
    networks:
      - netflix-scraper-network

volumes:
  postgres_data_dev:
  redis_data_dev:
  node_modules_data:

networks:
  netflix-scraper-network:
    driver: bridge

94
docker-compose.yml Normal file
View File

@@ -0,0 +1,94 @@
# ===========================================
# Production Docker Compose
# Optimized for production deployment
# ===========================================

services:
  # --- API (hardened: read-only rootfs, no privilege escalation) ---
  app:
    build:
      context: .
      dockerfile: Dockerfile
    container_name: netflix-scraper-api
    restart: unless-stopped
    ports:
      - "${PORT:-3000}:3000"
    environment:
      - NODE_ENV=production
      - PORT=3000
      - POSTGRES_HOST=postgres
      - POSTGRES_PORT=5432
      - POSTGRES_USER=${POSTGRES_USER}
      - POSTGRES_PASSWORD=${POSTGRES_PASSWORD}
      - POSTGRES_DB=${POSTGRES_DB}
      - REDIS_HOST=redis
      - REDIS_PORT=6379
      - REDIS_TTL_SECONDS=${REDIS_TTL_SECONDS:-604800}
      - RATE_LIMIT_WINDOW_MS=${RATE_LIMIT_WINDOW_MS:-60000}
      - RATE_LIMIT_MAX_REQUESTS=${RATE_LIMIT_MAX_REQUESTS:-30}
      # No defaults in production: these MUST be supplied via .env.
      - API_KEY_WEB=${API_KEY_WEB}
      - API_KEY_MOBILE=${API_KEY_MOBILE}
      - API_KEY_ADMIN=${API_KEY_ADMIN}
      # FIX: the dev compose file forwards TMDB credentials but they were
      # missing here, so the production container never received them.
      - TMDB_API_KEY=${TMDB_API_KEY}
      - TMDB_ACCESS_TOKEN=${TMDB_ACCESS_TOKEN}
    depends_on:
      postgres:
        condition: service_healthy
      redis:
        condition: service_healthy
    healthcheck:
      test: ["CMD", "wget", "--no-verbose", "--tries=1", "--spider", "http://localhost:3000/health"]
      interval: 30s
      timeout: 10s
      retries: 3
      start_period: 30s
    networks:
      - netflix-scraper-network
    security_opt:
      - no-new-privileges:true
    read_only: true
    tmpfs:
      # /tmp stays writable despite the read-only root filesystem.
      - /tmp

  # --- PostgreSQL (not published to the host in production) ---
  postgres:
    image: postgres:16-alpine
    container_name: netflix-scraper-postgres
    restart: unless-stopped
    environment:
      - POSTGRES_USER=${POSTGRES_USER}
      - POSTGRES_PASSWORD=${POSTGRES_PASSWORD}
      - POSTGRES_DB=${POSTGRES_DB}
    volumes:
      - postgres_data:/var/lib/postgresql/data
    healthcheck:
      test: ["CMD-SHELL", "pg_isready -U ${POSTGRES_USER} -d ${POSTGRES_DB}"]
      interval: 10s
      timeout: 5s
      retries: 5
      start_period: 10s
    networks:
      - netflix-scraper-network
    security_opt:
      - no-new-privileges:true

  # --- Redis (AOF persistence) ---
  redis:
    image: redis:7-alpine
    container_name: netflix-scraper-redis
    restart: unless-stopped
    command: redis-server --appendonly yes
    volumes:
      - redis_data:/data
    healthcheck:
      test: ["CMD", "redis-cli", "ping"]
      interval: 10s
      timeout: 5s
      retries: 5
      start_period: 5s
    networks:
      - netflix-scraper-network
    security_opt:
      - no-new-privileges:true

volumes:
  postgres_data:
  redis_data:

networks:
  netflix-scraper-network:
    driver: bridge

View File

@@ -0,0 +1,29 @@
---
date: 2026-02-27
topic: db-content-list
---
# DB Content List (Web)
## What We're Building
Veritabanında kayıtlı film ve dizileri web arayüzünde kart görünümü ile listeleyeceğiz. Her kartta poster/backdrop görseli, başlık, tür, yıl ve temel içerik bilgileri gösterilecek.
İlk sürümde veri kaynağı yalnızca mevcut DB olacak. Harici API ile poster tamamlama veya enrichment yapılmayacak.
## Why This Approach
Seçilen yaklaşım: server API endpoint + frontend Mantine card grid.
Bu yaklaşım, hızlı teslim ve temiz ayrım sağlar: backend yalnızca kayıtlı içerikleri döner, frontend görselleştirmeyi üstlenir. Sonradan filtre, pagination ve sıralama gibi eklemeler düşük maliyetle yapılabilir.
## Key Decisions
- Web arayüzü kullanılacak: Kullanıcı hedefi doğrudan UI üzerinden liste görmek.
- Kart görünümü seçildi: Poster + temel metadata ile hızlı taranabilir deneyim.
- Veri kaynağı sadece DB: İlk sürümde kapsam kontrolü ve hızlı teslim için.
- Mantine bileşenleri kullanılacak: Mevcut frontend stack ile uyum için.
## Open Questions
- Pagination ilk sürümde gerekli mi, yoksa 100 kayıt sınırı yeterli mi?
- Kartta hangi alanlar zorunlu tutulmalı (oy puanı, cast, genre sayısı)?
## Next Steps
-> `/workflows:plan`

24
frontend/.gitignore vendored Normal file
View File

@@ -0,0 +1,24 @@
# Logs
logs
*.log
npm-debug.log*
yarn-debug.log*
yarn-error.log*
pnpm-debug.log*
lerna-debug.log*
node_modules
dist
dist-ssr
*.local
# Editor directories and files
.vscode/*
!.vscode/extensions.json
.idea
.DS_Store
*.suo
*.ntvs*
*.njsproj
*.sln
*.sw?

73
frontend/README.md Normal file
View File

@@ -0,0 +1,73 @@
# React + TypeScript + Vite
This template provides a minimal setup to get React working in Vite with HMR and some ESLint rules.
Currently, two official plugins are available:
- [@vitejs/plugin-react](https://github.com/vitejs/vite-plugin-react/blob/main/packages/plugin-react) uses [Babel](https://babeljs.io/) (or [oxc](https://oxc.rs) when used in [rolldown-vite](https://vite.dev/guide/rolldown)) for Fast Refresh
- [@vitejs/plugin-react-swc](https://github.com/vitejs/vite-plugin-react/blob/main/packages/plugin-react-swc) uses [SWC](https://swc.rs/) for Fast Refresh
## React Compiler
The React Compiler is not enabled on this template because of its impact on dev & build performances. To add it, see [this documentation](https://react.dev/learn/react-compiler/installation).
## Expanding the ESLint configuration
If you are developing a production application, we recommend updating the configuration to enable type-aware lint rules:
```js
export default defineConfig([
globalIgnores(['dist']),
{
files: ['**/*.{ts,tsx}'],
extends: [
// Other configs...
// Remove tseslint.configs.recommended and replace with this
tseslint.configs.recommendedTypeChecked,
// Alternatively, use this for stricter rules
tseslint.configs.strictTypeChecked,
// Optionally, add this for stylistic rules
tseslint.configs.stylisticTypeChecked,
// Other configs...
],
languageOptions: {
parserOptions: {
project: ['./tsconfig.node.json', './tsconfig.app.json'],
tsconfigRootDir: import.meta.dirname,
},
// other options...
},
},
])
```
You can also install [eslint-plugin-react-x](https://github.com/Rel1cx/eslint-react/tree/main/packages/plugins/eslint-plugin-react-x) and [eslint-plugin-react-dom](https://github.com/Rel1cx/eslint-react/tree/main/packages/plugins/eslint-plugin-react-dom) for React-specific lint rules:
```js
// eslint.config.js
import reactX from 'eslint-plugin-react-x'
import reactDom from 'eslint-plugin-react-dom'
export default defineConfig([
globalIgnores(['dist']),
{
files: ['**/*.{ts,tsx}'],
extends: [
// Other configs...
// Enable lint rules for React
reactX.configs['recommended-typescript'],
// Enable lint rules for React DOM
reactDom.configs.recommended,
],
languageOptions: {
parserOptions: {
project: ['./tsconfig.node.json', './tsconfig.app.json'],
tsconfigRootDir: import.meta.dirname,
},
// other options...
},
},
])
```

23
frontend/eslint.config.js Normal file
View File

@@ -0,0 +1,23 @@
import js from '@eslint/js'
import globals from 'globals'
import reactHooks from 'eslint-plugin-react-hooks'
import reactRefresh from 'eslint-plugin-react-refresh'
import tseslint from 'typescript-eslint'
import { defineConfig, globalIgnores } from 'eslint/config'

// Flat ESLint config for the Vite + React + TypeScript frontend.
export default defineConfig([
  // Build output is generated — never lint it.
  globalIgnores(['dist']),
  {
    files: ['**/*.{ts,tsx}'],
    extends: [
      js.configs.recommended,
      tseslint.configs.recommended,
      reactHooks.configs.flat.recommended,
      reactRefresh.configs.vite,
    ],
    languageOptions: {
      ecmaVersion: 2020,
      // Browser globals only (window, document, fetch, …).
      globals: globals.browser,
    },
  },
])

13
frontend/index.html Normal file
View File

@@ -0,0 +1,13 @@
<!doctype html>
<html lang="en">
<head>
<meta charset="UTF-8" />
<link rel="icon" type="image/svg+xml" href="/vite.svg" />
<meta name="viewport" content="width=device-width, initial-scale=1.0" />
<title>frontend</title>
</head>
<body>
<div id="root"></div>
<script type="module" src="/src/main.tsx"></script>
</body>
</html>

3865
frontend/package-lock.json generated Normal file

File diff suppressed because it is too large Load Diff

45
frontend/package.json Normal file
View File

@@ -0,0 +1,45 @@
{
"name": "frontend",
"private": true,
"version": "0.0.0",
"type": "module",
"scripts": {
"dev": "vite",
"build": "tsc -b && vite build",
"lint": "eslint .",
"preview": "vite preview"
},
"dependencies": {
"@fortawesome/fontawesome-svg-core": "^7.2.0",
"@fortawesome/free-brands-svg-icons": "^7.2.0",
"@fortawesome/free-solid-svg-icons": "^7.2.0",
"@fortawesome/react-fontawesome": "^3.2.0",
"@mantine/core": "^8.3.15",
"@mantine/hooks": "^8.3.15",
"@tabler/icons-react": "^3.37.1",
"postcss": "^8.5.6",
"postcss-preset-mantine": "^1.18.0",
"postcss-simple-vars": "^7.0.1",
"react": "^19.2.0",
"react-dom": "^19.2.0"
},
"devDependencies": {
"@eslint/js": "^9.39.1",
"@types/node": "^24.10.1",
"@types/react": "^19.2.7",
"@types/react-dom": "^19.2.3",
"@vitejs/plugin-react": "^5.1.1",
"eslint": "^9.39.1",
"eslint-plugin-react-hooks": "^7.0.1",
"eslint-plugin-react-refresh": "^0.4.24",
"globals": "^16.5.0",
"typescript": "~5.9.3",
"typescript-eslint": "^8.48.0",
"vite": "^7.3.1"
},
"description": "This template provides a minimal setup to get React working in Vite with HMR and some ESLint rules.",
"main": "eslint.config.js",
"keywords": [],
"author": "",
"license": "ISC"
}

View File

@@ -0,0 +1,14 @@
// PostCSS setup required by Mantine: the Mantine preset plus breakpoint
// variables matching Mantine's default responsive breakpoints, so the
// same breakpoints are usable inside plain CSS files.
module.exports = {
  plugins: {
    'postcss-preset-mantine': {},
    'postcss-simple-vars': {
      variables: {
        'mantine-breakpoint-xs': '36em',
        'mantine-breakpoint-sm': '48em',
        'mantine-breakpoint-md': '62em',
        'mantine-breakpoint-lg': '75em',
        'mantine-breakpoint-xl': '88em',
      },
    },
  },
};

BIN
frontend/public/netflix.png Normal file

Binary file not shown.

After

Width:  |  Height:  |  Size: 5.5 KiB

1
frontend/public/vite.svg Normal file
View File

@@ -0,0 +1 @@
<svg xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" aria-hidden="true" role="img" class="iconify iconify--logos" width="31.88" height="32" preserveAspectRatio="xMidYMid meet" viewBox="0 0 256 257"><defs><linearGradient id="IconifyId1813088fe1fbc01fb466" x1="-.828%" x2="57.636%" y1="7.652%" y2="78.411%"><stop offset="0%" stop-color="#41D1FF"></stop><stop offset="100%" stop-color="#BD34FE"></stop></linearGradient><linearGradient id="IconifyId1813088fe1fbc01fb467" x1="43.376%" x2="50.316%" y1="2.242%" y2="89.03%"><stop offset="0%" stop-color="#FFEA83"></stop><stop offset="8.333%" stop-color="#FFDD35"></stop><stop offset="100%" stop-color="#FFA800"></stop></linearGradient></defs><path fill="url(#IconifyId1813088fe1fbc01fb466)" d="M255.153 37.938L134.897 252.976c-2.483 4.44-8.862 4.466-11.382.048L.875 37.958c-2.746-4.814 1.371-10.646 6.827-9.67l120.385 21.517a6.537 6.537 0 0 0 2.322-.004l117.867-21.483c5.438-.991 9.574 4.796 6.877 9.62Z"></path><path fill="url(#IconifyId1813088fe1fbc01fb467)" d="M185.432.063L96.44 17.501a3.268 3.268 0 0 0-2.634 3.014l-5.474 92.456a3.268 3.268 0 0 0 3.997 3.378l24.777-5.718c2.318-.535 4.413 1.507 3.936 3.838l-7.361 36.047c-.495 2.426 1.782 4.5 4.151 3.78l15.304-4.649c2.372-.72 4.652 1.36 4.15 3.788l-11.698 56.621c-.732 3.542 3.979 5.473 5.943 2.437l1.313-2.028l72.516-144.72c1.215-2.423-.88-5.186-3.54-4.672l-25.505 4.922c-2.396.462-4.435-1.77-3.759-4.114l16.646-57.705c.677-2.35-1.37-4.583-3.769-4.113Z"></path></svg>

After

Width:  |  Height:  |  Size: 1.5 KiB

2
frontend/src/App.css Normal file
View File

@@ -0,0 +1,2 @@
/* App styles - Mantine handles most styling */

8
frontend/src/App.tsx Normal file
View File

@@ -0,0 +1,8 @@
import { MoviesPage } from './pages/MoviesPage'
import './App.css'

// Root component — the app currently renders a single page (DB content list).
function App() {
  return <MoviesPage />
}

export default App

View File

@@ -0,0 +1 @@
<svg xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" aria-hidden="true" role="img" class="iconify iconify--logos" width="35.93" height="32" preserveAspectRatio="xMidYMid meet" viewBox="0 0 256 228"><path fill="#00D8FF" d="M210.483 73.824a171.49 171.49 0 0 0-8.24-2.597c.465-1.9.893-3.777 1.273-5.621c6.238-30.281 2.16-54.676-11.769-62.708c-13.355-7.7-35.196.329-57.254 19.526a171.23 171.23 0 0 0-6.375 5.848a155.866 155.866 0 0 0-4.241-3.917C100.759 3.829 77.587-4.822 63.673 3.233C50.33 10.957 46.379 33.89 51.995 62.588a170.974 170.974 0 0 0 1.892 8.48c-3.28.932-6.445 1.924-9.474 2.98C17.309 83.498 0 98.307 0 113.668c0 15.865 18.582 31.778 46.812 41.427a145.52 145.52 0 0 0 6.921 2.165a167.467 167.467 0 0 0-2.01 9.138c-5.354 28.2-1.173 50.591 12.134 58.266c13.744 7.926 36.812-.22 59.273-19.855a145.567 145.567 0 0 0 5.342-4.923a168.064 168.064 0 0 0 6.92 6.314c21.758 18.722 43.246 26.282 56.54 18.586c13.731-7.949 18.194-32.003 12.4-61.268a145.016 145.016 0 0 0-1.535-6.842c1.62-.48 3.21-.974 4.76-1.488c29.348-9.723 48.443-25.443 48.443-41.52c0-15.417-17.868-30.326-45.517-39.844Zm-6.365 70.984c-1.4.463-2.836.91-4.3 1.345c-3.24-10.257-7.612-21.163-12.963-32.432c5.106-11 9.31-21.767 12.459-31.957c2.619.758 5.16 1.557 7.61 2.4c23.69 8.156 38.14 20.213 38.14 29.504c0 9.896-15.606 22.743-40.946 31.14Zm-10.514 20.834c2.562 12.94 2.927 24.64 1.23 33.787c-1.524 8.219-4.59 13.698-8.382 15.893c-8.067 4.67-25.32-1.4-43.927-17.412a156.726 156.726 0 0 1-6.437-5.87c7.214-7.889 14.423-17.06 21.459-27.246c12.376-1.098 24.068-2.894 34.671-5.345a134.17 134.17 0 0 1 1.386 6.193ZM87.276 214.515c-7.882 2.783-14.16 2.863-17.955.675c-8.075-4.657-11.432-22.636-6.853-46.752a156.923 156.923 0 0 1 1.869-8.499c10.486 2.32 22.093 3.988 34.498 4.994c7.084 9.967 14.501 19.128 21.976 27.15a134.668 134.668 0 0 1-4.877 4.492c-9.933 8.682-19.886 14.842-28.658 17.94ZM50.35 144.747c-12.483-4.267-22.792-9.812-29.858-15.863c-6.35-5.437-9.555-10.836-9.555-15.216c0-9.322 
13.897-21.212 37.076-29.293c2.813-.98 5.757-1.905 8.812-2.773c3.204 10.42 7.406 21.315 12.477 32.332c-5.137 11.18-9.399 22.249-12.634 32.792a134.718 134.718 0 0 1-6.318-1.979Zm12.378-84.26c-4.811-24.587-1.616-43.134 6.425-47.789c8.564-4.958 27.502 2.111 47.463 19.835a144.318 144.318 0 0 1 3.841 3.545c-7.438 7.987-14.787 17.08-21.808 26.988c-12.04 1.116-23.565 2.908-34.161 5.309a160.342 160.342 0 0 1-1.76-7.887Zm110.427 27.268a347.8 347.8 0 0 0-7.785-12.803c8.168 1.033 15.994 2.404 23.343 4.08c-2.206 7.072-4.956 14.465-8.193 22.045a381.151 381.151 0 0 0-7.365-13.322Zm-45.032-43.861c5.044 5.465 10.096 11.566 15.065 18.186a322.04 322.04 0 0 0-30.257-.006c4.974-6.559 10.069-12.652 15.192-18.18ZM82.802 87.83a323.167 323.167 0 0 0-7.227 13.238c-3.184-7.553-5.909-14.98-8.134-22.152c7.304-1.634 15.093-2.97 23.209-3.984a321.524 321.524 0 0 0-7.848 12.897Zm8.081 65.352c-8.385-.936-16.291-2.203-23.593-3.793c2.26-7.3 5.045-14.885 8.298-22.6a321.187 321.187 0 0 0 7.257 13.246c2.594 4.48 5.28 8.868 8.038 13.147Zm37.542 31.03c-5.184-5.592-10.354-11.779-15.403-18.433c4.902.192 9.899.29 14.978.29c5.218 0 10.376-.117 15.453-.343c-4.985 6.774-10.018 12.97-15.028 18.486Zm52.198-57.817c3.422 7.8 6.306 15.345 8.596 22.52c-7.422 1.694-15.436 3.058-23.88 4.071a382.417 382.417 0 0 0 7.859-13.026a347.403 347.403 0 0 0 7.425-13.565Zm-16.898 8.101a358.557 358.557 0 0 1-12.281 19.815a329.4 329.4 0 0 1-23.444.823c-7.967 0-15.716-.248-23.178-.732a310.202 310.202 0 0 1-12.513-19.846h.001a307.41 307.41 0 0 1-10.923-20.627a310.278 310.278 0 0 1 10.89-20.637l-.001.001a307.318 307.318 0 0 1 12.413-19.761c7.613-.576 15.42-.876 23.31-.876H128c7.926 0 15.743.303 23.354.883a329.357 329.357 0 0 1 12.335 19.695a358.489 358.489 0 0 1 11.036 20.54a329.472 329.472 0 0 1-11 20.722Zm22.56-122.124c8.572 4.944 11.906 24.881 6.52 51.026c-.344 1.668-.73 3.367-1.15 5.09c-10.622-2.452-22.155-4.275-34.23-5.408c-7.034-10.017-14.323-19.124-21.64-27.008a160.789 160.789 0 0 1 5.888-5.4c18.9-16.447 36.564-22.941 
44.612-18.3ZM128 90.808c12.625 0 22.86 10.235 22.86 22.86s-10.235 22.86-22.86 22.86s-22.86-10.235-22.86-22.86s10.235-22.86 22.86-22.86Z"></path></svg>

After

Width:  |  Height:  |  Size: 4.0 KiB

10
frontend/src/index.css Normal file
View File

@@ -0,0 +1,10 @@
body {
margin: 0;
padding: 0;
background-color: #1a1a1b;
min-height: 100vh;
}
#root {
min-height: 100vh;
}

19
frontend/src/main.tsx Normal file
View File

@@ -0,0 +1,19 @@
import { StrictMode } from 'react'
import { createRoot } from 'react-dom/client'
import { MantineProvider, createTheme } from '@mantine/core'
import '@mantine/core/styles.css'
import './index.css'
import App from './App.tsx'

// Netflix-style red as the Mantine primary color.
const theme = createTheme({
  primaryColor: 'red',
  fontFamily: 'Inter, system-ui, Avenir, Helvetica, Arial, sans-serif',
})

// index.html guarantees #root exists, hence the non-null assertion.
createRoot(document.getElementById('root')!).render(
  <StrictMode>
    <MantineProvider theme={theme} defaultColorScheme="dark">
      <App />
    </MantineProvider>
  </StrictMode>,
)

View File

@@ -0,0 +1,208 @@
import { useEffect, useMemo, useState } from 'react'
import {
Alert,
Badge,
Card,
Container,
Grid,
Group,
Image,
Loader,
Paper,
SegmentedControl,
Stack,
Text,
Title,
} from '@mantine/core'
import { IconAlertCircle, IconDeviceTv, IconMovie } from '@tabler/icons-react'
// Content kinds stored in the DB; mirrors the `type` column on `content`.
type ContentType = 'movie' | 'tvshow'

/** One row of the `/api/content` list response. */
interface ContentListItem {
  title: string
  year: number | null
  plot: string | null
  ageRating: string | null
  type: ContentType
  genres: string[]
  backdrop: string | null
  // Only meaningful for TV shows; null for movies.
  currentSeason: number | null
}

/** Response envelope: exactly one of `data` / `error` is expected to be set. */
interface ContentListResponse {
  success: boolean
  data?: ContentListItem[]
  error?: {
    message: string
  }
}
export function MoviesPage() {
const [items, setItems] = useState<ContentListItem[]>([])
const [loading, setLoading] = useState(true)
const [error, setError] = useState<string | null>(null)
const [typeFilter, setTypeFilter] = useState<'all' | ContentType>('all')
useEffect(() => {
const controller = new AbortController()
const loadContent = async () => {
setLoading(true)
setError(null)
try {
const params = new URLSearchParams()
params.append('limit', '100')
if (typeFilter !== 'all') {
params.append('type', typeFilter)
}
const response = await fetch(`/api/content?${params.toString()}`, {
method: 'GET',
headers: {
'X-API-Key': 'web-dev-key-change-me',
},
signal: controller.signal,
})
const data: ContentListResponse = await response.json()
if (data.success && data.data) {
setItems(data.data)
return
}
setError(data.error?.message || 'Liste alınamadı')
} catch {
if (!controller.signal.aborted) {
setError('Bağlantı hatası')
}
} finally {
if (!controller.signal.aborted) {
setLoading(false)
}
}
}
void loadContent()
return () => controller.abort()
}, [typeFilter])
const pageTitle = useMemo(() => {
if (typeFilter === 'movie') return 'Film Listesi'
if (typeFilter === 'tvshow') return 'Dizi Listesi'
return 'Film ve Dizi Listesi'
}, [typeFilter])
return (
<Container size="xl" py="xl">
<Stack gap="lg">
<Paper radius="md" p="lg" withBorder>
<Group justify="space-between" align="end">
<div>
<Title order={1}>{pageTitle}</Title>
<Text c="dimmed" size="sm">
Veriler doğrudan veritabanından okunur.
</Text>
</div>
<SegmentedControl
value={typeFilter}
onChange={(value) => setTypeFilter(value as 'all' | ContentType)}
data={[
{ label: 'Tümü', value: 'all' },
{ label: 'Filmler', value: 'movie' },
{ label: 'Diziler', value: 'tvshow' },
]}
/>
</Group>
</Paper>
{error && (
<Alert icon={<IconAlertCircle size={16} />} color="red" title="Hata">
{error}
</Alert>
)}
{loading ? (
<Group justify="center" py="xl">
<Loader size="lg" />
</Group>
) : (
<Grid>
{items.map((item) => (
<Grid.Col key={`${item.type}-${item.title}-${item.year ?? 'na'}`} span={{ base: 12, sm: 6, md: 4 }}>
<Card shadow="sm" radius="md" padding="md" withBorder style={{ position: 'relative' }}>
<Card.Section>
{item.backdrop ? (
<Image src={item.backdrop} alt={item.title} h={180} />
) : (
<Group h={180} justify="center" style={{ backgroundColor: '#262626' }}>
{item.type === 'movie' ? (
<IconMovie size={48} color="#8a8a8a" />
) : (
<IconDeviceTv size={48} color="#8a8a8a" />
)}
</Group>
)}
</Card.Section>
<Stack gap="xs" mt="md" pb="lg">
<Group justify="space-between" align="start">
<Text fw={700} lineClamp={2}>
{item.title}
</Text>
<Badge color={item.type === 'movie' ? 'red' : 'blue'}>
{item.type === 'movie' ? 'Film' : 'Dizi'}
</Badge>
</Group>
<Group gap="xs">
{item.year && <Badge variant="light">{item.year}</Badge>}
{item.ageRating && <Badge variant="outline">{item.ageRating}</Badge>}
{item.currentSeason && item.type === 'tvshow' && (
<Badge variant="light" color="grape">
S{item.currentSeason}
</Badge>
)}
</Group>
<Text size="sm" c="dimmed" lineClamp={3}>
{item.plot || 'Açıklama bulunamadı.'}
</Text>
<Group gap={6}>
{item.genres.slice(0, 3).map((genre) => (
<Badge key={genre} size="sm" variant="dot">
{genre}
</Badge>
))}
</Group>
</Stack>
<img
src="/netflix.png"
alt="Netflix"
style={{
position: 'absolute',
right: 12,
bottom: 12,
width: 30,
height: 30,
objectFit: 'contain',
opacity: 0.95,
}}
/>
</Card>
</Grid.Col>
))}
</Grid>
)}
{!loading && !error && items.length === 0 && (
<Text c="dimmed" ta="center">
Kayıt bulunamadı.
</Text>
)}
</Stack>
</Container>
)
}

View File

@@ -0,0 +1,28 @@
{
"compilerOptions": {
"tsBuildInfoFile": "./node_modules/.tmp/tsconfig.app.tsbuildinfo",
"target": "ES2022",
"useDefineForClassFields": true,
"lib": ["ES2022", "DOM", "DOM.Iterable"],
"module": "ESNext",
"types": ["vite/client"],
"skipLibCheck": true,
/* Bundler mode */
"moduleResolution": "bundler",
"allowImportingTsExtensions": true,
"verbatimModuleSyntax": true,
"moduleDetection": "force",
"noEmit": true,
"jsx": "react-jsx",
/* Linting */
"strict": true,
"noUnusedLocals": true,
"noUnusedParameters": true,
"erasableSyntaxOnly": true,
"noFallthroughCasesInSwitch": true,
"noUncheckedSideEffectImports": true
},
"include": ["src"]
}

7
frontend/tsconfig.json Normal file
View File

@@ -0,0 +1,7 @@
{
"files": [],
"references": [
{ "path": "./tsconfig.app.json" },
{ "path": "./tsconfig.node.json" }
]
}

View File

@@ -0,0 +1,26 @@
{
"compilerOptions": {
"tsBuildInfoFile": "./node_modules/.tmp/tsconfig.node.tsbuildinfo",
"target": "ES2023",
"lib": ["ES2023"],
"module": "ESNext",
"types": ["node"],
"skipLibCheck": true,
/* Bundler mode */
"moduleResolution": "bundler",
"allowImportingTsExtensions": true,
"verbatimModuleSyntax": true,
"moduleDetection": "force",
"noEmit": true,
/* Linting */
"strict": true,
"noUnusedLocals": true,
"noUnusedParameters": true,
"erasableSyntaxOnly": true,
"noFallthroughCasesInSwitch": true,
"noUncheckedSideEffectImports": true
},
"include": ["vite.config.ts"]
}

21
frontend/vite.config.ts Normal file
View File

@@ -0,0 +1,21 @@
import { defineConfig } from 'vite'
import react from '@vitejs/plugin-react'

// https://vite.dev/config/
export default defineConfig({
  plugins: [react()],
  server: {
    port: 5173,
    // Forward API calls to the backend so the browser sees a single
    // origin during development (avoids CORS entirely).
    proxy: {
      '/api': {
        target: 'http://localhost:3000',
        changeOrigin: true,
      },
    },
  },
  css: {
    modules: {
      // Expose CSS-module class names to TS/JS in camelCase.
      localsConvention: 'camelCase',
    },
  },
})

2301
package-lock.json generated Normal file

File diff suppressed because it is too large Load Diff

37
package.json Normal file
View File

@@ -0,0 +1,37 @@
{
"name": "netflix-scraper-api",
"version": "1.0.0",
"description": "Netflix content scraper API with caching and real-time updates",
"main": "dist/index.js",
"scripts": {
"dev": "tsx watch src/index.ts",
"build": "tsc",
"start": "node dist/index.js",
"prisma:generate": "prisma generate",
"prisma:migrate": "prisma migrate deploy",
"prisma:seed": "tsx prisma/seed.ts",
"lint": "eslint src --ext .ts",
"typecheck": "tsc --noEmit"
},
"dependencies": {
"@prisma/client": "^5.22.0",
"cheerio": "^1.0.0",
"express": "^4.21.1",
"express-rate-limit": "^7.4.1",
"ioredis": "^5.4.1",
"socket.io": "^4.8.1",
"uuid": "^10.0.0",
"zod": "^3.23.8"
},
"devDependencies": {
"@types/express": "^4.17.21",
"@types/node": "^22.9.0",
"@types/uuid": "^10.0.0",
"prisma": "^5.22.0",
"tsx": "^4.19.2",
"typescript": "^5.6.3"
},
"engines": {
"node": ">=20.0.0"
}
}

View File

@@ -0,0 +1,95 @@
-- CreateEnum
-- NOTE(review): this enum is created but never referenced below —
-- "scrape_jobs"."status" is declared VARCHAR(20) (and the Prisma model
-- uses String). Either bind the column to the enum or drop this type in
-- a follow-up migration; do not edit this applied migration in place.
CREATE TYPE "ScrapeJobStatus" AS ENUM ('pending', 'processing', 'completed', 'failed');
-- CreateTable
-- Main table for scraped Netflix content; one row per unique URL.
CREATE TABLE "content" (
    "id" TEXT NOT NULL,
    "url" VARCHAR(500) NOT NULL,
    "title" VARCHAR(255) NOT NULL,
    "year" INTEGER,
    "plot" TEXT,
    "backdropUrl" TEXT,
    "createdAt" TIMESTAMP(3) NOT NULL DEFAULT CURRENT_TIMESTAMP,
    "updatedAt" TIMESTAMP(3) NOT NULL,
    CONSTRAINT "content_pkey" PRIMARY KEY ("id")
);
-- CreateTable
-- Genre lookup table (unique names).
CREATE TABLE "genres" (
    "id" TEXT NOT NULL,
    "name" VARCHAR(100) NOT NULL,
    CONSTRAINT "genres_pkey" PRIMARY KEY ("id")
);
-- CreateTable
-- Many-to-many join between content and genres.
CREATE TABLE "content_genres" (
    "contentId" TEXT NOT NULL,
    "genreId" TEXT NOT NULL,
    CONSTRAINT "content_genres_pkey" PRIMARY KEY ("contentId","genreId")
);
-- CreateTable
-- Cast members, owned by a single content row (cascade-deleted with it).
CREATE TABLE "cast_members" (
    "id" TEXT NOT NULL,
    "contentId" TEXT NOT NULL,
    "name" VARCHAR(255) NOT NULL,
    CONSTRAINT "cast_members_pkey" PRIMARY KEY ("id")
);
-- CreateTable
-- Async scrape job queue; "result" holds the scraped payload as JSON.
CREATE TABLE "scrape_jobs" (
    "id" TEXT NOT NULL,
    "url" VARCHAR(500) NOT NULL,
    "status" VARCHAR(20) NOT NULL DEFAULT 'pending',
    "progress" INTEGER NOT NULL DEFAULT 0,
    "step" VARCHAR(100),
    "error" TEXT,
    "result" JSONB,
    "createdAt" TIMESTAMP(3) NOT NULL DEFAULT CURRENT_TIMESTAMP,
    "updatedAt" TIMESTAMP(3) NOT NULL,
    CONSTRAINT "scrape_jobs_pkey" PRIMARY KEY ("id")
);
-- CreateIndex
CREATE UNIQUE INDEX "content_url_key" ON "content"("url");
-- CreateIndex
CREATE INDEX "content_url_idx" ON "content"("url");
-- CreateIndex
CREATE INDEX "content_title_idx" ON "content"("title");
-- CreateIndex
CREATE INDEX "content_year_idx" ON "content"("year");
-- CreateIndex
CREATE UNIQUE INDEX "genres_name_key" ON "genres"("name");
-- CreateIndex
CREATE INDEX "genres_name_idx" ON "genres"("name");
-- CreateIndex
CREATE INDEX "cast_members_contentId_idx" ON "cast_members"("contentId");
-- CreateIndex
CREATE INDEX "cast_members_name_idx" ON "cast_members"("name");
-- CreateIndex
CREATE INDEX "scrape_jobs_url_idx" ON "scrape_jobs"("url");
-- CreateIndex
CREATE INDEX "scrape_jobs_status_idx" ON "scrape_jobs"("status");
-- AddForeignKey
ALTER TABLE "content_genres" ADD CONSTRAINT "content_genres_contentId_fkey" FOREIGN KEY ("contentId") REFERENCES "content"("id") ON DELETE CASCADE ON UPDATE CASCADE;
-- AddForeignKey
ALTER TABLE "content_genres" ADD CONSTRAINT "content_genres_genreId_fkey" FOREIGN KEY ("genreId") REFERENCES "genres"("id") ON DELETE CASCADE ON UPDATE CASCADE;
-- AddForeignKey
ALTER TABLE "cast_members" ADD CONSTRAINT "cast_members_contentId_fkey" FOREIGN KEY ("contentId") REFERENCES "content"("id") ON DELETE CASCADE ON UPDATE CASCADE;

View File

@@ -0,0 +1,2 @@
-- Add ageRating column to content table
ALTER TABLE "content" ADD COLUMN "ageRating" VARCHAR(10);

View File

@@ -0,0 +1,5 @@
-- Add type column to content table
ALTER TABLE "content" ADD COLUMN "type" VARCHAR(10) NOT NULL DEFAULT 'movie';
-- Create index for type field
CREATE INDEX "content_type_idx" ON "content"("type");

View File

@@ -0,0 +1,5 @@
-- Add currentSeason column to content table
ALTER TABLE "content" ADD COLUMN "currentSeason" INTEGER;
-- Create index for currentSeason field
CREATE INDEX "content_current_season_idx" ON "content"("currentSeason");

View File

@@ -0,0 +1,3 @@
# Please do not edit this file manually
# It should be added in your version-control system (i.e. Git)
provider = "postgresql"

103
prisma/schema.prisma Normal file
View File

@@ -0,0 +1,103 @@
// Prisma Schema for Netflix Scraper API
// Database: PostgreSQL
generator client {
provider = "prisma-client-js"
}
datasource db {
provider = "postgresql"
url = env("DATABASE_URL")
}
// ============================================
// Content Tables
// ============================================
/// Main content table for scraped Netflix data
model Content {
id String @id @default(uuid())
url String @unique @db.VarChar(500)
title String @db.VarChar(255)
year Int?
plot String? @db.Text
backdropUrl String? @db.Text
ageRating String? @db.VarChar(10)
type String @default("movie") @db.VarChar(10) // movie or tvshow
currentSeason Int? // Current season number for TV shows
// Relations
genres ContentGenre[]
castMembers CastMember[]
createdAt DateTime @default(now())
updatedAt DateTime @updatedAt
@@index([url])
@@index([title])
@@index([year])
@@index([type])
@@index([currentSeason])
@@map("content")
}
/// Genres lookup table
model Genre {
id String @id @default(uuid())
name String @unique @db.VarChar(100)
// Relations
contents ContentGenre[]
@@index([name])
@@map("genres")
}
/// Content-Genre many-to-many relationship (join table).
/// Rows are removed automatically when either side is deleted (Cascade).
model ContentGenre {
contentId String
genreId String
content Content @relation(fields: [contentId], references: [id], onDelete: Cascade)
genre Genre @relation(fields: [genreId], references: [id], onDelete: Cascade)
@@id([contentId, genreId])
@@map("content_genres")
}
/// Cast members for content. One row per (content, actor name);
/// names are not deduplicated across titles.
model CastMember {
id String @id @default(uuid())
contentId String
name String @db.VarChar(255)
content Content @relation(fields: [contentId], references: [id], onDelete: Cascade)
@@index([contentId])
@@index([name])
@@map("cast_members")
}
// ============================================
// Job Queue Table (for async processing)
// ============================================
/// Scrape job queue. Jobs are polled/updated by JobService; progress is a
/// 0-100 percentage and `step` is a human-readable description of the stage.
model ScrapeJob {
id String @id @default(uuid())
url String @db.VarChar(500)
status String @default("pending") @db.VarChar(20) // pending, processing, completed, failed
progress Int @default(0)
step String? @db.VarChar(100)
error String? @db.Text
// Result stored as JSON when completed
result Json?
createdAt DateTime @default(now())
updatedAt DateTime @updatedAt
@@index([url])
@@index([status])
@@map("scrape_jobs")
}

49
prisma/seed.ts Normal file
View File

@@ -0,0 +1,49 @@
import { PrismaClient } from '@prisma/client';

const prisma = new PrismaClient();

/**
 * Seed script for initial data
 * Run with: npx tsx prisma/seed.ts
 *
 * Idempotent: each genre is upserted by its unique name, so re-running
 * the script is safe and leaves existing rows untouched.
 */
async function main() {
  console.log('Seeding database...');
  // Seed default genres.
  // NOTE(review): the list mixes genre names with age-rating labels
  // ('18+', '16+', ...) — confirm that is intentional for this domain.
  const genres = [
    'Aksiyon',
    'Komedi',
    'Dram',
    'Korku',
    'Romantik',
    'Bilim Kurgu',
    'Gerilim',
    'Belgesel',
    'Animasyon',
    'Aile',
    '18+',
    '16+',
    '13+',
    '7+',
  ];
  for (const genreName of genres) {
    await prisma.genre.upsert({
      where: { name: genreName },
      update: {},
      create: { name: genreName },
    });
  }
  console.log(`Seeded ${genres.length} genres`);
  console.log('Seed completed successfully!');
}

main()
  .catch((e) => {
    console.error('Seed failed:', e);
    // Fixed: process.exit(1) here would terminate immediately and skip the
    // .finally() below, leaking the Prisma connection. Setting exitCode lets
    // the disconnect run and the process exit naturally with status 1.
    process.exitCode = 1;
  })
  .finally(async () => {
    await prisma.$disconnect();
  });

34
scripts/dev-startup.sh Executable file
View File

@@ -0,0 +1,34 @@
#!/bin/sh
set -e

echo "=== Netflix Scraper API Dev Startup ==="

# Set DATABASE_URL from individual POSTGRES_* environment variables.
# This overrides the dummy value set during Docker build.
# NOTE(review): credentials are not URL-encoded here; a password containing
# '@', '/' or ':' would produce an invalid connection URL.
export DATABASE_URL="postgresql://${POSTGRES_USER}:${POSTGRES_PASSWORD}@${POSTGRES_HOST}:${POSTGRES_PORT}/${POSTGRES_DB}"
echo "Database URL configured: postgresql://${POSTGRES_USER}:***@${POSTGRES_HOST}:${POSTGRES_PORT}/${POSTGRES_DB}"

# Wait for database to be ready.
# Fixed: host/port are quoted so an empty or whitespace value fails the nc
# call loudly instead of silently changing its argument list.
echo "Waiting for database..."
until nc -z "$POSTGRES_HOST" "$POSTGRES_PORT"; do
  echo "Database not ready, waiting..."
  sleep 2
done
echo "Database is ready!"

# Generate Prisma client (dev images may not have it baked in).
echo "Generating Prisma client..."
npx prisma generate

# Apply committed migrations.
echo "Running database migrations..."
npx prisma migrate deploy

# Run seed (optional; '|| echo' keeps startup going if it was already run).
echo "Running seed..."
npx tsx prisma/seed.ts || echo "Seed already run or not needed"

# Start the application in dev mode, replacing this shell so signals
# (SIGTERM from Docker) reach the node process directly.
echo "Starting application in development mode..."
exec npm run dev

30
scripts/startup.sh Executable file
View File

@@ -0,0 +1,30 @@
#!/bin/sh
set -e

echo "=== Netflix Scraper API Startup ==="

# Set DATABASE_URL from individual POSTGRES_* environment variables.
# This overrides the dummy value set during Docker build.
# NOTE(review): credentials are not URL-encoded here; a password containing
# '@', '/' or ':' would produce an invalid connection URL.
export DATABASE_URL="postgresql://${POSTGRES_USER}:${POSTGRES_PASSWORD}@${POSTGRES_HOST}:${POSTGRES_PORT}/${POSTGRES_DB}"
echo "Database URL configured: postgresql://${POSTGRES_USER}:***@${POSTGRES_HOST}:${POSTGRES_PORT}/${POSTGRES_DB}"

# Wait for database to be ready.
# Fixed: host/port are quoted so an empty or whitespace value fails the nc
# call loudly instead of silently changing its argument list.
echo "Waiting for database..."
until nc -z "$POSTGRES_HOST" "$POSTGRES_PORT"; do
  echo "Database not ready, waiting..."
  sleep 2
done
echo "Database is ready!"

# Apply committed migrations.
echo "Running database migrations..."
npx prisma migrate deploy

# Run seed (optional, won't fail startup if already seeded).
# NOTE(review): this assumes tsx is installed in the production image —
# confirm, since production often prunes dev dependencies.
echo "Running seed..."
npx tsx prisma/seed.ts || echo "Seed already run or not needed"

# Start the application, replacing this shell so signals reach node directly.
echo "Starting application..."
exec node dist/index.js

View File

@@ -0,0 +1,104 @@
---
name: brainstorming
description: Clarify what to build before planning or implementation by exploring intent, constraints, trade-offs, and success criteria. Use when requests are ambiguous, have multiple valid interpretations, include phrases like "let's brainstorm" or "help me think through", or when feature scope needs refinement before writing an implementation plan.
---
# Brainstorming
Guide collaborative brainstorming sessions that define WHAT to build before HOW to build it. Keep output short, decision-oriented, and validated with the user at each step.
## Phase 0: Assess Clarity
Decide whether brainstorming is necessary before asking discovery questions.
Signals to skip brainstorming and proceed:
- Requirements include concrete acceptance criteria.
- Expected behavior and boundaries are explicit.
- Task is a straightforward bug fix or well-scoped change.
If clarity is already high, say:
- "Requirements look clear. Proceed directly to planning or implementation."
## Phase 1: Understand the Idea
Ask one question at a time. Prefer multiple-choice when natural options exist.
Question order:
1. Purpose: problem and motivation.
2. Users: who uses it and in what context.
3. Constraints: technical, timeline, dependencies.
4. Success: how "good" is measured.
5. Edge cases: failures, exclusions, non-goals.
6. Existing patterns: similar feature to follow.
Validate assumptions explicitly:
- "I assume users are authenticated. Correct?"
Exit this phase when the request is clear or the user says "proceed".
## Phase 2: Explore Approaches
Provide 2-3 options with explicit trade-offs and a recommendation.
Use this structure:
### Approach A: <Name>
<2-3 sentence summary>
Pros:
- <benefit>
- <benefit>
Cons:
- <trade-off>
- <trade-off>
Best when:
- <condition>
Apply YAGNI:
- Prefer the simplest approach that solves the current problem.
- Defer speculative complexity.
## Phase 3: Capture Decisions
Write a concise brainstorm note to:
- `docs/brainstorms/YYYY-MM-DD-<topic>-brainstorm.md`
Use this format:
```markdown
---
date: YYYY-MM-DD
topic: <kebab-case-topic>
---
# <Topic Title>
## What We're Building
<1-2 short paragraphs>
## Why This Approach
<brief comparison and rationale>
## Key Decisions
- <decision>: <rationale>
- <decision>: <rationale>
## Open Questions
- <unresolved item>
## Next Steps
-> `/workflows:plan`
```
## Phase 4: Handoff
Present the following next-action options and ask the user to choose exactly one:
1. Proceed to planning.
2. Refine assumptions.
3. Pause and resume later.
Keep each response section short (about 200-300 words max), then confirm alignment:
- "Does this match your intent?"
- "Any adjustments before we continue?"

View File

@@ -0,0 +1,4 @@
interface:
display_name: "Brainstorming"
short_description: "Clarify what to build before planning or implementation."
default_prompt: "Use this skill to clarify ambiguous requests, compare approaches, and capture decisions before planning."

63
src/config/database.ts Normal file
View File

@@ -0,0 +1,63 @@
import { PrismaClient } from '@prisma/client';
import { getDatabaseUrl, env } from './env.js';
import logger from '../utils/logger.js';
// Set database URL for Prisma before the client below is constructed.
// src/index.ts performs the same assignment at startup; this duplicate
// makes the module safe to import on its own (e.g. from scripts/tests).
process.env.DATABASE_URL = getDatabaseUrl();

/**
 * Prisma Client singleton
 * Handles connection pooling and retries
 *
 * Cached on globalThis outside production so dev-server hot reloads reuse
 * one client instead of leaking database connections.
 */
const globalForPrisma = globalThis as unknown as {
  prisma: PrismaClient | undefined;
};

export const prisma =
  globalForPrisma.prisma ??
  new PrismaClient({
    // Verbose query logging only in development.
    log:
      env.NODE_ENV === 'development'
        ? ['query', 'error', 'warn']
        : ['error'],
  });

if (env.NODE_ENV !== 'production') {
  globalForPrisma.prisma = prisma;
}
/**
 * Open the Prisma connection, retrying a fixed number of times.
 *
 * @param retries - total connection attempts before giving up
 * @param delay - milliseconds to pause between failed attempts
 * @throws Error when every attempt fails
 */
export async function connectDatabase(retries = 5, delay = 2000): Promise<void> {
  let attempt = 0;
  while (attempt < retries) {
    attempt += 1;
    try {
      await prisma.$connect();
      logger.info('Database connected successfully', {
        host: env.POSTGRES_HOST,
        database: env.POSTGRES_DB,
      });
      return;
    } catch (error) {
      logger.warn(`Database connection attempt ${attempt}/${retries} failed`, {
        error: error instanceof Error ? error.message : 'Unknown error',
      });
      // Sleep only when another attempt remains.
      if (attempt < retries) {
        await new Promise((resolve) => setTimeout(resolve, delay));
      }
    }
  }
  throw new Error('Failed to connect to database after maximum retries');
}
/**
 * Disconnect from database
 *
 * Called from the graceful-shutdown path in src/index.ts.
 */
export async function disconnectDatabase(): Promise<void> {
  await prisma.$disconnect();
  logger.info('Database disconnected');
}

export default prisma;

77
src/config/env.ts Normal file
View File

@@ -0,0 +1,77 @@
import { z } from 'zod';
/**
 * Environment variable schema with validation
 * Fails fast on startup if required variables are missing
 *
 * Numeric defaults are given as strings on purpose: a default value is fed
 * through the same string -> Number transform pipeline as a real env value.
 */
const envSchema = z.object({
  // Server
  NODE_ENV: z.enum(['development', 'production', 'test']).default('development'),
  PORT: z.string().transform(Number).pipe(z.number().positive()).default('3000'),
  // PostgreSQL
  POSTGRES_HOST: z.string().min(1),
  POSTGRES_PORT: z.string().transform(Number).pipe(z.number().positive()).default('5432'),
  POSTGRES_USER: z.string().min(1),
  POSTGRES_PASSWORD: z.string().min(1),
  POSTGRES_DB: z.string().min(1),
  // Redis
  REDIS_HOST: z.string().min(1),
  REDIS_PORT: z.string().transform(Number).pipe(z.number().positive()).default('6379'),
  REDIS_TTL_SECONDS: z.string().transform(Number).pipe(z.number().positive()).default('604800'), // 7 days
  // Rate Limiting
  RATE_LIMIT_WINDOW_MS: z.string().transform(Number).pipe(z.number().positive()).default('60000'), // 1 minute
  RATE_LIMIT_MAX_REQUESTS: z.string().transform(Number).pipe(z.number().positive()).default('30'),
  // API Keys (named keys for different frontends)
  API_KEY_WEB: z.string().min(1),
  API_KEY_MOBILE: z.string().min(1),
  API_KEY_ADMIN: z.string().min(1),
  // TMDB API
  TMDB_API_KEY: z.string().min(1),
  TMDB_ACCESS_TOKEN: z.string().min(1),
});

export type EnvConfig = z.infer<typeof envSchema>;

/**
 * Parse and validate environment variables
 * Throws error on validation failure
 *
 * @returns the fully typed, transformed configuration object
 */
function parseEnv(): EnvConfig {
  const result = envSchema.safeParse(process.env);
  if (!result.success) {
    const errors = result.error.issues.map(
      (issue) => ` - ${issue.path.join('.')}: ${issue.message}`
    );
    throw new Error(
      `Environment validation failed:\n${errors.join('\n')}\n\nPlease check your .env file.`
    );
  }
  return result.data;
}

// Parsed once at module load; any import of this module validates the env.
export const env = parseEnv();
/**
 * All API keys accepted by the auth middleware, as a Set for O(1) lookup.
 * A fresh Set is built on each call from the validated environment.
 */
export function getValidApiKeys(): Set<string> {
  const keys: string[] = [env.API_KEY_WEB, env.API_KEY_MOBILE, env.API_KEY_ADMIN];
  return new Set(keys);
}
/**
 * Get database connection URL
 *
 * Fixed: user and password are URL-encoded so credentials containing
 * reserved characters ('@', '/', ':', '%') still yield a valid URL.
 * NOTE(review): the startup shell scripts build the same URL without
 * encoding — keep them in sync if credentials may contain such characters.
 */
export function getDatabaseUrl(): string {
  const user = encodeURIComponent(env.POSTGRES_USER);
  const password = encodeURIComponent(env.POSTGRES_PASSWORD);
  return `postgresql://${user}:${password}@${env.POSTGRES_HOST}:${env.POSTGRES_PORT}/${env.POSTGRES_DB}`;
}

66
src/config/redis.ts Normal file
View File

@@ -0,0 +1,66 @@
import Redis from 'ioredis';
import { env } from './env.js';
import logger from '../utils/logger.js';
/**
 * Redis Client singleton
 *
 * Cached on globalThis outside production so dev-server hot reloads reuse
 * one connection instead of opening a new one per reload.
 */
const globalForRedis = globalThis as unknown as {
  redis: Redis | undefined;
};

export const redis =
  globalForRedis.redis ??
  new Redis({
    host: env.REDIS_HOST,
    port: env.REDIS_PORT,
    // Linear backoff capped at 5s; returning null stops reconnecting
    // after 5 failed attempts.
    retryStrategy: (times: number) => {
      if (times > 5) {
        logger.error('Redis connection failed after 5 retries');
        return null;
      }
      const delay = Math.min(times * 1000, 5000);
      logger.warn(`Redis retrying connection in ${delay}ms`, { attempt: times });
      return delay;
    },
    maxRetriesPerRequest: 3,
  });

if (env.NODE_ENV !== 'production') {
  globalForRedis.redis = redis;
}

redis.on('connect', () => {
  logger.info('Redis connected successfully', {
    host: env.REDIS_HOST,
    port: env.REDIS_PORT,
  });
});

// An 'error' listener is required: without it ioredis emits an unhandled
// 'error' event that would crash the process.
redis.on('error', (error) => {
  logger.error('Redis connection error', {
    error: error.message,
  });
});
/**
 * Probe the Redis connection with a PING.
 * Never throws — any failure is reported as an unhealthy (false) result.
 *
 * @returns true only when Redis answers PONG
 */
export async function checkRedisConnection(): Promise<boolean> {
  try {
    return (await redis.ping()) === 'PONG';
  } catch {
    return false;
  }
}
/**
 * Disconnect from Redis
 *
 * Uses quit() so pending replies are flushed before the socket closes.
 */
export async function disconnectRedis(): Promise<void> {
  await redis.quit();
  logger.info('Redis disconnected');
}

export default redis;

131
src/config/socket.ts Normal file
View File

@@ -0,0 +1,131 @@
import { Server as HttpServer } from 'http';
import { Server, Socket } from 'socket.io';
import logger from '../utils/logger.js';
/**
 * Socket.IO Server singleton
 * null until initializeSocket() is called; reset to null by closeSocket().
 */
let io: Server | null = null;

// Per-socket bookkeeping: the job IDs this client asked to follow.
export interface SocketData {
  subscribedJobs: Set<string>;
}
/**
 * Initialize Socket.IO server
 *
 * Wires per-job pub/sub rooms: clients join/leave room `job:<jobId>` via
 * the 'job:subscribe' / 'job:unsubscribe' events, and the emit* helpers
 * below broadcast to those rooms.
 *
 * @param httpServer - the Node HTTP server to attach to
 * @returns the created Server instance (also stored in the module singleton)
 */
export function initializeSocket(httpServer: HttpServer): Server {
  io = new Server(httpServer, {
    cors: {
      // NOTE(review): wildcard CORS accepts any origin — tighten to the
      // known frontend domains before production exposure.
      origin: '*', // Configure based on your frontend domains
      methods: ['GET', 'POST'],
    },
    transports: ['websocket', 'polling'],
  });
  io.on('connection', (socket: Socket) => {
    logger.info('Client connected', { socketId: socket.id });
    // Initialize socket data
    (socket.data as SocketData).subscribedJobs = new Set();
    // Handle job subscription
    socket.on('job:subscribe', (jobId: string) => {
      (socket.data as SocketData).subscribedJobs.add(jobId);
      socket.join(`job:${jobId}`);
      logger.debug('Client subscribed to job', { socketId: socket.id, jobId });
    });
    // Handle job unsubscription
    socket.on('job:unsubscribe', (jobId: string) => {
      (socket.data as SocketData).subscribedJobs.delete(jobId);
      socket.leave(`job:${jobId}`);
      logger.debug('Client unsubscribed from job', { socketId: socket.id, jobId });
    });
    socket.on('disconnect', () => {
      logger.info('Client disconnected', { socketId: socket.id });
    });
  });
  logger.info('Socket.IO server initialized');
  return io;
}
/**
 * Return the initialized Socket.IO server.
 * @throws Error when initializeSocket() has not been called yet.
 */
export function getSocketIO(): Server {
  if (io === null) {
    throw new Error('Socket.IO not initialized. Call initializeSocket first.');
  }
  return io;
}
/**
 * Broadcast a progress update to every client in the job's room.
 * No-op when the Socket.IO server has not been initialized.
 */
export function emitJobProgress(
  jobId: string,
  progress: number,
  status: string,
  step: string
): void {
  io?.to(`job:${jobId}`).emit('job:progress', { jobId, progress, status, step });
}
/**
 * Broadcast a completion event (with the scraped payload and its source)
 * to every client in the job's room. No-op when Socket.IO is uninitialized.
 */
export function emitJobCompleted(
  jobId: string,
  data: unknown,
  source: string
): void {
  io?.to(`job:${jobId}`).emit('job:completed', { jobId, data, source });
}
/**
 * Broadcast a failure event to every client in the job's room.
 * No-op when the Socket.IO server has not been initialized.
 */
export function emitJobError(
  jobId: string,
  error: { code: string; message: string }
): void {
  io?.to(`job:${jobId}`).emit('job:error', { jobId, error });
}
/**
 * Close Socket.IO server
 *
 * Resolves once the underlying server has finished closing; the singleton
 * is reset to null so initializeSocket() can be called again.
 */
export async function closeSocket(): Promise<void> {
  if (io) {
    await new Promise<void>((resolve) => {
      io!.close(() => {
        logger.info('Socket.IO server closed');
        resolve();
      });
    });
    io = null;
  }
}

// NOTE(review): `export default io` captures the value of `io` at module
// evaluation time — always null here; a default export of a reassigned `let`
// is not a live binding. Consumers must use getSocketIO() instead. Confirm
// nothing imports the default, then remove it.
export default io;

112
src/index.ts Normal file
View File

@@ -0,0 +1,112 @@
import express from 'express';
import { createServer } from 'http';
import { env, getDatabaseUrl } from './config/env.js';
import { connectDatabase, disconnectDatabase } from './config/database.js';
import { disconnectRedis } from './config/redis.js';
import { initializeSocket, closeSocket } from './config/socket.js';
import { rateLimiter } from './middleware/rateLimit.middleware.js';
import { errorHandler, notFoundHandler } from './middleware/error.middleware.js';
import apiRoutes from './routes/api.routes.js';
import tmdbRoutes from './routes/tmdb.routes.js';
import healthRoutes from './routes/health.routes.js';
import logger from './utils/logger.js';
// Set DATABASE_URL for Prisma (also done in config/database.ts; harmless
// duplication that guarantees the variable exists before any Prisma import).
process.env.DATABASE_URL = getDatabaseUrl();

/**
 * Application entry point
 *
 * Order matters: body parsing and rate limiting run before routing; the
 * 404 and error handlers are registered last so they only see requests no
 * route handled.
 */
async function main() {
  const app = express();
  const httpServer = createServer(app);
  // Initialize Socket.IO
  initializeSocket(httpServer);
  // Middleware
  app.use(express.json({ limit: '10mb' }));
  app.use(express.urlencoded({ extended: true }));
  // Apply general rate limiting (its own skip() exempts /health and /ready).
  app.use(rateLimiter);
  // Request logging middleware
  app.use((req, res, next) => {
    logger.info('Incoming request', {
      method: req.method,
      path: req.path,
      ip: req.ip,
      userAgent: req.headers['user-agent'],
    });
    next();
  });
  // Health check routes (no auth required)
  app.use(healthRoutes);
  // API routes
  app.use('/api', apiRoutes);
  app.use('/api/tmdb', tmdbRoutes);
  // 404 handler
  app.use(notFoundHandler);
  // Error handler
  app.use(errorHandler);
  // Connect to database with retry
  await connectDatabase();
  // Start server
  httpServer.listen(env.PORT, () => {
    logger.info('Server started', {
      port: env.PORT,
      env: env.NODE_ENV,
    });
  });
  // Graceful shutdown handlers
  const gracefulShutdown = async (signal: string) => {
    logger.info(`Received ${signal}, starting graceful shutdown`);
    // Close server.
    // NOTE(review): close() is not awaited — process.exit(0) below can fire
    // before in-flight HTTP requests finish. Consider wrapping in a Promise.
    httpServer.close(() => {
      logger.info('HTTP server closed');
    });
    // Close connections
    await closeSocket();
    await disconnectRedis();
    await disconnectDatabase();
    logger.info('Graceful shutdown completed');
    process.exit(0);
  };
  process.on('SIGTERM', () => gracefulShutdown('SIGTERM'));
  process.on('SIGINT', () => gracefulShutdown('SIGINT'));
  // Handle uncaught exceptions: state is unknown, so exit immediately.
  process.on('uncaughtException', (error) => {
    logger.error('Uncaught exception', {
      error: error.message,
      stack: error.stack,
    });
    process.exit(1);
  });
  // Unhandled rejections are logged but do not exit the process.
  process.on('unhandledRejection', (reason) => {
    logger.error('Unhandled rejection', {
      reason: reason instanceof Error ? reason.message : String(reason),
    });
  });
}

// Startup failures (env validation, DB unreachable) are fatal.
main().catch((error) => {
  logger.error('Application startup failed', {
    error: error.message,
    stack: error.stack,
  });
  process.exit(1);
});

View File

@@ -0,0 +1,73 @@
import { Request, Response, NextFunction } from 'express';
import { env, getValidApiKeys } from '../config/env.js';
import logger from '../utils/logger.js';
import type { ApiResponse } from '../types/index.js';
/**
 * API Key Authentication Middleware
 * Validates API key from X-API-Key header
 *
 * 401 when the header is absent, 403 when the key is present but not one
 * of the configured keys. Only the first 8 characters of a rejected key
 * are logged, to avoid leaking credentials into logs.
 */
export function authMiddleware(
  req: Request,
  res: Response,
  next: NextFunction
): void {
  const apiKey = req.headers['x-api-key'] as string | undefined;
  if (!apiKey) {
    const response: ApiResponse<never> = {
      success: false,
      error: {
        code: 'MISSING_API_KEY',
        message: 'API key is required. Include X-API-Key header.',
      },
    };
    logger.warn('Request missing API key', {
      ip: req.ip,
      path: req.path,
    });
    res.status(401).json(response);
    return;
  }
  const validKeys = getValidApiKeys();
  if (!validKeys.has(apiKey)) {
    const response: ApiResponse<never> = {
      success: false,
      error: {
        code: 'INVALID_API_KEY',
        message: 'Invalid API key provided.',
      },
    };
    logger.warn('Invalid API key attempt', {
      ip: req.ip,
      path: req.path,
      keyPrefix: apiKey.substring(0, 8) + '...',
    });
    res.status(403).json(response);
    return;
  }
  // Valid API key, proceed
  next();
}
/**
* Optional: Identify which client made the request
*/
export function identifyClient(apiKey: string): string {
const { env } = require('../config/env.js');
if (apiKey === env.API_KEY_WEB) return 'web';
if (apiKey === env.API_KEY_MOBILE) return 'mobile';
if (apiKey === env.API_KEY_ADMIN) return 'admin';
return 'unknown';
}
export default authMiddleware;

View File

@@ -0,0 +1,50 @@
import { Request, Response, NextFunction } from 'express';
import logger from '../utils/logger.js';
import type { ApiResponse } from '../types/index.js';
/**
 * Global Express error handler: logs the failure with request context and
 * answers with a generic 500 so internal details never reach the client.
 *
 * The unused `_next` parameter is required — Express recognizes error
 * middleware by its four-argument signature.
 */
export function errorHandler(
  error: Error,
  req: Request,
  res: Response,
  _next: NextFunction
): void {
  logger.error('Unhandled error', {
    error: error.message,
    stack: error.stack,
    path: req.path,
    method: req.method,
  });
  const body: ApiResponse<never> = {
    success: false,
    error: {
      code: 'INTERNAL_ERROR',
      message: 'An unexpected error occurred. Please try again later.',
    },
  };
  res.status(500).json(body);
}
/**
 * Catch-all 404 handler for requests that matched no registered route.
 * Echoes the method and path back in the error message for easy debugging.
 */
export function notFoundHandler(
  req: Request,
  res: Response
): void {
  const body: ApiResponse<never> = {
    success: false,
    error: {
      code: 'NOT_FOUND',
      message: `Endpoint ${req.method} ${req.path} not found`,
    },
  };
  res.status(404).json(body);
}
export default errorHandler;

View File

@@ -0,0 +1,87 @@
import rateLimit from 'express-rate-limit';
import { env } from '../config/env.js';
import logger from '../utils/logger.js';
import type { ApiResponse } from '../types/index.js';
/**
 * Rate Limiter Configuration
 * Limits requests per IP within a time window
 */
export const rateLimiter = rateLimit({
  windowMs: env.RATE_LIMIT_WINDOW_MS, // Time window in milliseconds
  max: env.RATE_LIMIT_MAX_REQUESTS, // Max requests per window per IP
  standardHeaders: true, // Return rate limit info in RateLimit-* headers
  legacyHeaders: false, // Disable X-RateLimit-* headers
  // Custom key generator (use IP + API key for more granular limiting).
  // NOTE(review): express-rate-limit recommends normalizing IPv6 addresses
  // in custom key generators (ipKeyGenerator helper) — confirm for this
  // deployment or clients behind IPv6 may rotate keys per request.
  keyGenerator: (req) => {
    const apiKey = req.headers['x-api-key'] as string | undefined;
    return `${req.ip}:${apiKey || 'no-key'}`;
  },
  // Custom handler for rate limit exceeded
  handler: (req, res) => {
    const response: ApiResponse<never> = {
      success: false,
      error: {
        code: 'RATE_LIMIT_EXCEEDED',
        message: `Too many requests. Maximum ${env.RATE_LIMIT_MAX_REQUESTS} requests per ${env.RATE_LIMIT_WINDOW_MS / 1000} seconds.`,
        details: {
          retryAfter: Math.ceil(env.RATE_LIMIT_WINDOW_MS / 1000),
        },
      },
    };
    logger.warn('Rate limit exceeded', {
      ip: req.ip,
      path: req.path,
      maxRequests: env.RATE_LIMIT_MAX_REQUESTS,
      windowMs: env.RATE_LIMIT_WINDOW_MS,
    });
    res.status(429).json(response);
  },
  // Skip rate limiting for health checks
  skip: (req) => {
    return req.path === '/health' || req.path === '/ready';
  },
});

/**
 * Stricter rate limiter for scraping endpoints
 * Prevents abuse of Netflix scraping
 *
 * Uses a 'scrape:' key prefix so this budget is tracked independently of
 * the general limiter above.
 */
export const scrapeRateLimiter = rateLimit({
  windowMs: 60 * 1000, // 1 minute
  max: 10, // Only 10 scrape requests per minute
  standardHeaders: true,
  legacyHeaders: false,
  keyGenerator: (req) => {
    const apiKey = req.headers['x-api-key'] as string | undefined;
    return `scrape:${req.ip}:${apiKey || 'no-key'}`;
  },
  handler: (req, res) => {
    const response: ApiResponse<never> = {
      success: false,
      error: {
        code: 'SCRAPE_RATE_LIMIT_EXCEEDED',
        message: 'Too many scrape requests. Please wait before trying again.',
        details: {
          retryAfter: 60,
        },
      },
    };
    logger.warn('Scrape rate limit exceeded', {
      ip: req.ip,
      path: req.path,
    });
    res.status(429).json(response);
  },
});

export default rateLimiter;

View File

@@ -0,0 +1,93 @@
import { Request, Response, NextFunction } from 'express';
import { z } from 'zod';
import type { ApiResponse, GetInfoRequest } from '../types/index.js';
/**
 * Validation schema for /api/getinfo endpoint
 *
 * Accepts only Netflix title URLs: the host must be a known Netflix domain
 * and the path must contain /title/<digits> (locale prefixes like /tr/ pass
 * because the regex is tested against the full URL string).
 */
const getInfoSchema = z.object({
  url: z.string().url('Invalid URL format').refine((url) => {
    // Validate Netflix URL
    try {
      const parsedUrl = new URL(url);
      const validHosts = [
        'www.netflix.com',
        'netflix.com',
        'www.netflix.com.tr',
        'netflix.com.tr',
      ];
      const hasTitlePath = /\/title\/\d+/.test(url);
      return validHosts.includes(parsedUrl.hostname) && hasTitlePath;
    } catch {
      return false;
    }
  }, 'URL must be a valid Netflix title URL (e.g., https://www.netflix.com/tr/title/81616256)'),
});

/**
 * Validate request body for /api/getinfo
 *
 * On success the parsed body is attached as `req.validated` for downstream
 * handlers; on failure responds 400 with per-field error details.
 */
export function validateGetInfo(
  req: Request,
  res: Response,
  next: NextFunction
): void {
  const result = getInfoSchema.safeParse(req.body);
  if (!result.success) {
    const errors = result.error.issues.map((issue) => ({
      field: issue.path.join('.'),
      message: issue.message,
    }));
    const response: ApiResponse<never> = {
      success: false,
      error: {
        code: 'VALIDATION_ERROR',
        message: 'Invalid request parameters',
        details: { errors },
      },
    };
    res.status(400).json(response);
    return;
  }
  // Attach validated data to request
  (req as Request & { validated: GetInfoRequest }).validated = result.data;
  next();
}
/**
 * Generic validation middleware factory
 *
 * Builds an Express middleware that validates `req.body` against the given
 * Zod schema and rejects invalid requests with a 400 VALIDATION_ERROR body.
 *
 * Fixed inconsistency: the parsed (and transformed) data is now attached as
 * `req.validated`, mirroring validateGetInfo above, so handlers consume the
 * sanitized value instead of re-reading the raw body. Backward-compatible —
 * existing handlers that ignore `req.validated` are unaffected.
 */
export function validateBody<T extends z.ZodType>(
  schema: T
): (req: Request, res: Response, next: NextFunction) => void {
  return (req, res, next) => {
    const result = schema.safeParse(req.body);
    if (!result.success) {
      const errors = result.error.issues.map((issue) => ({
        field: issue.path.join('.'),
        message: issue.message,
      }));
      const response: ApiResponse<never> = {
        success: false,
        error: {
          code: 'VALIDATION_ERROR',
          message: 'Invalid request parameters',
          details: { errors },
        },
      };
      res.status(400).json(response);
      return;
    }
    // Expose the parsed payload to downstream handlers.
    (req as Request & { validated: z.infer<T> }).validated = result.data;
    next();
  };
}

export default validateGetInfo;

234
src/routes/api.routes.ts Normal file
View File

@@ -0,0 +1,234 @@
import { Router, Request, Response } from 'express';
import { z } from 'zod';
import { authMiddleware } from '../middleware/auth.middleware.js';
import { scrapeRateLimiter } from '../middleware/rateLimit.middleware.js';
import { validateGetInfo } from '../middleware/validation.middleware.js';
import { JobService } from '../services/job.service.js';
import { ContentService } from '../services/content.service.js';
import type { ApiResponse, GetInfoRequest, GetInfoResponse } from '../types/index.js';
const router = Router();

// Query-string schema for GET /api/content; z.coerce converts the string
// query parameters to numbers before range-checking.
const listContentSchema = z.object({
  type: z.enum(['movie', 'tvshow']).optional(),
  limit: z.coerce.number().int().min(1).max(100).optional(),
});

/**
 * POST /api/getinfo
 * Get content information from Netflix URL
 *
 * Request body: { url: string }
 * Headers: X-API-Key: <api_key>
 *
 * Response: { success: boolean, data?: ContentData, error?: ApiError }
 */
router.post(
  '/getinfo',
  authMiddleware,
  scrapeRateLimiter,
  validateGetInfo,
  async (
    // validateGetInfo attaches the parsed body as req.validated.
    req: Request & { validated: GetInfoRequest },
    res: Response<ApiResponse<GetInfoResponse>>
  ) => {
    const { url } = req.validated;
    try {
      // Process synchronously (hybrid: cache -> db -> netflix)
      const result = await JobService.processSync(url);
      const response: ApiResponse<GetInfoResponse> = {
        success: true,
        data: result.data,
      };
      res.json(response);
    } catch (error) {
      const response: ApiResponse<GetInfoResponse> = {
        success: false,
        error: {
          code: 'SCRAPE_ERROR',
          message:
            error instanceof Error ? error.message : 'Failed to scrape content',
        },
      };
      res.status(500).json(response);
    }
  }
);
/**
 * GET /api/content
 * List content already stored in DB
 *
 * Query params: type?: movie|tvshow, limit?: 1-100
 * Headers: X-API-Key: <api_key>
 *
 * Serves only from the database — never triggers a scrape.
 */
router.get(
  '/content',
  authMiddleware,
  async (
    req: Request,
    res: Response<ApiResponse<GetInfoResponse[]>>
  ) => {
    const validation = listContentSchema.safeParse(req.query);
    if (!validation.success) {
      const errors = validation.error.issues.map((issue) => ({
        field: issue.path.join('.'),
        message: issue.message,
      }));
      const response: ApiResponse<GetInfoResponse[]> = {
        success: false,
        error: {
          code: 'VALIDATION_ERROR',
          message: 'Invalid query parameters',
          details: { errors },
        },
      };
      res.status(400).json(response);
      return;
    }
    try {
      // Default page size caps the response at 100 rows.
      const content = await ContentService.list({
        type: validation.data.type,
        limit: validation.data.limit ?? 100,
      });
      const response: ApiResponse<GetInfoResponse[]> = {
        success: true,
        data: content.map((item) => ContentService.toApiResponse(item)),
      };
      res.json(response);
    } catch (error) {
      const response: ApiResponse<GetInfoResponse[]> = {
        success: false,
        error: {
          code: 'CONTENT_LIST_ERROR',
          message:
            error instanceof Error ? error.message : 'Failed to fetch content',
        },
      };
      res.status(500).json(response);
    }
  }
);
/**
 * POST /api/getinfo/async
 * Create async job for content scraping
 *
 * Request body: { url: string }
 * Headers: X-API-Key: <api_key>
 *
 * Response: { success: boolean, data?: { jobId: string }, error?: ApiError }
 * Returns 202 immediately; progress is delivered via Socket.IO job rooms
 * and GET /api/jobs/:jobId polling.
 */
router.post(
  '/getinfo/async',
  authMiddleware,
  scrapeRateLimiter,
  validateGetInfo,
  async (
    req: Request & { validated: GetInfoRequest },
    res: Response<ApiResponse<{ jobId: string; status: string }>>
  ) => {
    const { url } = req.validated;
    try {
      // Create job
      const job = await JobService.create(url);
      // Start processing in background (fire-and-forget; failures are
      // recorded on the job record).
      // NOTE(review): uses console.error while the rest of the codebase
      // logs via the shared logger — consider unifying.
      JobService.process(job.id).catch((err) => {
        console.error('Job processing error:', err);
      });
      const response: ApiResponse<{ jobId: string; status: string }> = {
        success: true,
        data: {
          jobId: job.id,
          status: job.status,
        },
      };
      res.status(202).json(response);
    } catch (error) {
      const response: ApiResponse<{ jobId: string; status: string }> = {
        success: false,
        error: {
          code: 'JOB_CREATE_ERROR',
          message:
            error instanceof Error ? error.message : 'Failed to create job',
        },
      };
      res.status(500).json(response);
    }
  }
);
/**
 * GET /api/jobs/:jobId
 * Get job status
 *
 * Headers: X-API-Key: <api_key>
 *
 * Returns the full job record (status, progress, step, error, result).
 */
router.get(
  '/jobs/:jobId',
  authMiddleware,
  async (req: Request, res: Response) => {
    const { jobId } = req.params;
    if (!jobId) {
      const response: ApiResponse<never> = {
        success: false,
        error: {
          code: 'VALIDATION_ERROR',
          message: 'jobId is required',
        },
      };
      res.status(400).json(response);
      return;
    }
    try {
      const job = await JobService.getById(jobId);
      if (!job) {
        const response: ApiResponse<never> = {
          success: false,
          error: {
            code: 'JOB_NOT_FOUND',
            message: 'Job not found',
          },
        };
        res.status(404).json(response);
        return;
      }
      // NOTE(review): unlike the other handlers this response object is
      // untyped — consider ApiResponse<typeof job> for consistency.
      const response = {
        success: true,
        data: job,
      };
      res.json(response);
    } catch (error) {
      const response: ApiResponse<never> = {
        success: false,
        error: {
          code: 'JOB_FETCH_ERROR',
          message:
            error instanceof Error ? error.message : 'Failed to fetch job',
        },
      };
      res.status(500).json(response);
    }
  }
);

export default router;

View File

@@ -0,0 +1,54 @@
import { Router, Request, Response } from 'express';
import { checkRedisConnection } from '../config/redis.js';
import prisma from '../config/database.js';
import { env } from '../config/env.js';
const router = Router();

/**
 * GET /health
 * Basic health check endpoint
 *
 * Liveness probe: always 200 while the process is running; does not touch
 * any dependency.
 */
router.get('/health', (_req: Request, res: Response) => {
  res.status(200).json({
    status: 'ok',
    timestamp: new Date().toISOString(),
    uptime: process.uptime(),
  });
});

/**
 * GET /ready
 * Readiness check - verifies all dependencies are available
 *
 * Returns 503 when either PostgreSQL or Redis is unreachable, so load
 * balancers stop routing traffic until both recover.
 */
router.get('/ready', async (_req: Request, res: Response) => {
  const checks = {
    database: false,
    redis: false,
  };
  // Check database with a trivial round-trip query.
  try {
    await prisma.$queryRaw`SELECT 1`;
    checks.database = true;
  } catch (error) {
    // NOTE(review): console.error here, shared logger elsewhere — unify.
    console.error('Database health check failed:', error);
  }
  // Check Redis
  checks.redis = await checkRedisConnection();
  const allHealthy = checks.database && checks.redis;
  res.status(allHealthy ? 200 : 503).json({
    status: allHealthy ? 'ready' : 'not_ready',
    timestamp: new Date().toISOString(),
    checks: {
      database: checks.database ? 'healthy' : 'unhealthy',
      redis: checks.redis ? 'healthy' : 'unhealthy',
    },
    env: env.NODE_ENV,
  });
});

export default router;

222
src/routes/tmdb.routes.ts Normal file
View File

@@ -0,0 +1,222 @@
import { Router, Request, Response } from 'express';
import { z } from 'zod';
import { authMiddleware } from '../middleware/auth.middleware.js';
import { scrapeRateLimiter } from '../middleware/rateLimit.middleware.js';
import { TmdbService } from '../services/tmdb.service.js';
import type {
ApiResponse,
TmdbSearchResponse,
} from '../types/index.js';
const router = Router();

/**
 * Validation schema for TMDB search
 *
 * Note: the upper bound on `year` (current year + 10) is evaluated once at
 * module load, not per request.
 */
const tmdbSearchSchema = z.object({
  query: z.string().trim().min(1, 'Query must be at least 1 character').max(200, 'Query must be at most 200 characters'),
  year: z.coerce.number().int().min(1900).max(new Date().getFullYear() + 10).optional(),
  type: z.enum(['movie', 'tv', 'multi']).optional(),
  seasonYear: z.coerce.number().int().min(1900).max(new Date().getFullYear() + 10).optional(),
  seasonNumber: z.coerce.number().int().min(1).max(100).optional(),
});

/**
 * POST /api/tmdb/search
 * Search for movies and TV shows using TMDB API
 *
 * Request body: { query: string, year?: number, type?: 'movie' | 'tv' | 'multi' }
 * Headers: X-API-Key: <api_key>
 *
 * Response: { success: boolean, data?: TmdbSearchResponse, error?: ApiError }
 * Defaults to a 'multi' search when no type is given.
 */
router.post(
  '/search',
  authMiddleware,
  scrapeRateLimiter,
  async (
    req: Request,
    res: Response<ApiResponse<TmdbSearchResponse>>
  ) => {
    // Validate request body
    const result = tmdbSearchSchema.safeParse(req.body);
    if (!result.success) {
      const errors = result.error.issues.map((issue) => ({
        field: issue.path.join('.'),
        message: issue.message,
      }));
      const response: ApiResponse<TmdbSearchResponse> = {
        success: false,
        error: {
          code: 'VALIDATION_ERROR',
          message: 'Invalid request parameters',
          details: { errors },
        },
      };
      res.status(400).json(response);
      return;
    }
    const { query, year, type, seasonYear, seasonNumber } = result.data;
    try {
      const searchResult = await TmdbService.search({
        query,
        year,
        type: type || 'multi',
        seasonYear,
        seasonNumber,
      });
      const response: ApiResponse<TmdbSearchResponse> = {
        success: true,
        data: searchResult,
      };
      res.json(response);
    } catch (error) {
      const response: ApiResponse<TmdbSearchResponse> = {
        success: false,
        error: {
          code: 'TMDB_ERROR',
          message:
            error instanceof Error ? error.message : 'Failed to search TMDB',
        },
      };
      res.status(500).json(response);
    }
  }
);
/**
 * POST /api/tmdb/search/movie
 * Search for movies only
 *
 * Request body: { query: string, year?: number }
 * Headers: X-API-Key: <api_key>
 */
// Fixed: the schema was rebuilt inside the handler on every request; it is
// hoisted to module scope so it is constructed once, matching how
// tmdbSearchSchema above is defined (including evaluating the `year` upper
// bound at process start).
const movieSearchSchema = z.object({
  query: z.string().trim().min(1).max(200),
  year: z.coerce.number().int().min(1900).max(new Date().getFullYear() + 10).optional(),
});
router.post(
  '/search/movie',
  authMiddleware,
  scrapeRateLimiter,
  async (
    req: Request,
    res: Response<ApiResponse<TmdbSearchResponse>>
  ) => {
    const result = movieSearchSchema.safeParse(req.body);
    if (!result.success) {
      const errors = result.error.issues.map((issue) => ({
        field: issue.path.join('.'),
        message: issue.message,
      }));
      const response: ApiResponse<TmdbSearchResponse> = {
        success: false,
        error: {
          code: 'VALIDATION_ERROR',
          message: 'Invalid request parameters',
          details: { errors },
        },
      };
      res.status(400).json(response);
      return;
    }
    const { query, year } = result.data;
    try {
      const searchResult = await TmdbService.searchMovies(query, year);
      const response: ApiResponse<TmdbSearchResponse> = {
        success: true,
        data: searchResult,
      };
      res.json(response);
    } catch (error) {
      const response: ApiResponse<TmdbSearchResponse> = {
        success: false,
        error: {
          code: 'TMDB_ERROR',
          message:
            error instanceof Error ? error.message : 'Failed to search movies',
        },
      };
      res.status(500).json(response);
    }
  }
);
/**
 * Validation schema for TV-only search.
 * Hoisted to module scope so the schema is constructed once at startup;
 * previously it was rebuilt inside the handler on every request.
 */
const tvSearchSchema = z.object({
  query: z.string().trim().min(1).max(200),
  year: z.coerce.number().int().min(1900).max(new Date().getFullYear() + 10).optional(),
  seasonYear: z.coerce.number().int().min(1900).max(new Date().getFullYear() + 10).optional(),
  seasonNumber: z.coerce.number().int().min(1).max(100).optional(),
});
/**
 * POST /api/tmdb/search/tv
 * Search for TV shows only
 *
 * Request body: { query: string, year?: number, seasonYear?: number, seasonNumber?: number }
 * Response: { success: boolean, data?: TmdbSearchResponse, error?: ApiError }
 */
router.post(
  '/search/tv',
  authMiddleware,
  scrapeRateLimiter,
  async (
    req: Request,
    res: Response<ApiResponse<TmdbSearchResponse>>
  ) => {
    const result = tvSearchSchema.safeParse(req.body);
    if (!result.success) {
      // Flatten zod issues into { field, message } pairs for the client.
      const errors = result.error.issues.map((issue) => ({
        field: issue.path.join('.'),
        message: issue.message,
      }));
      const response: ApiResponse<TmdbSearchResponse> = {
        success: false,
        error: {
          code: 'VALIDATION_ERROR',
          message: 'Invalid request parameters',
          details: { errors },
        },
      };
      res.status(400).json(response);
      return;
    }
    const { query, year, seasonYear, seasonNumber } = result.data;
    try {
      // Note the service's argument order: seasonNumber before seasonYear.
      const searchResult = await TmdbService.searchTv(query, year, seasonNumber, seasonYear);
      const response: ApiResponse<TmdbSearchResponse> = {
        success: true,
        data: searchResult,
      };
      res.json(response);
    } catch (error) {
      // Surface upstream TMDB failures as a 500 with a stable error code.
      const response: ApiResponse<TmdbSearchResponse> = {
        success: false,
        error: {
          code: 'TMDB_ERROR',
          message:
            error instanceof Error ? error.message : 'Failed to search TV shows',
        },
      };
      res.status(500).json(response);
    }
  }
);
export default router;

View File

@@ -0,0 +1,146 @@
import redis from '../config/redis.js';
import { env } from '../config/env.js';
import logger from '../utils/logger.js';
import type { GetInfoResponse, CacheEntry, DataSource } from '../types/index.js';
/**
 * Key prefix under which Netflix content entries are stored in Redis.
 */
const CACHE_PREFIX = 'netflix:content:';
/**
 * Build the Redis key for a Netflix URL.
 * Prefers the numeric title id from "/title/<id>"; falls back to the raw
 * URL when no title id is present.
 */
function getCacheKey(url: string): string {
  const match = /\/title\/(\d+)/.exec(url);
  const suffix = match?.[1] ?? url;
  return CACHE_PREFIX + suffix;
}
/**
 * Cache Service for Redis operations
 * Handles caching with TTL support
 *
 * All methods swallow Redis errors (after logging) so that a cache outage
 * degrades to cache misses instead of failing requests.
 */
export class CacheService {
  /**
   * Get cached content by URL
   *
   * @returns Parsed response payload, or null on miss / parse / Redis error.
   */
  static async get(url: string): Promise<GetInfoResponse | null> {
    const key = getCacheKey(url);
    try {
      const cached = await redis.get(key);
      if (!cached) {
        logger.debug('Cache miss', { url });
        return null;
      }
      logger.debug('Cache hit', { url });
      const entry: CacheEntry<GetInfoResponse> = JSON.parse(cached);
      return entry.data;
    } catch (error) {
      logger.error('Cache get error', {
        url,
        error: error instanceof Error ? error.message : 'Unknown error',
      });
      return null;
    }
  }
  /**
   * Set cache entry with TTL (env.REDIS_TTL_SECONDS).
   * The payload is wrapped in a CacheEntry envelope with write metadata.
   */
  static async set(url: string, data: GetInfoResponse): Promise<void> {
    const key = getCacheKey(url);
    const ttl = env.REDIS_TTL_SECONDS;
    const entry: CacheEntry<GetInfoResponse> = {
      data,
      cachedAt: Date.now(),
      ttl,
    };
    try {
      await redis.setex(key, ttl, JSON.stringify(entry));
      logger.debug('Cache set', { url, ttl });
    } catch (error) {
      logger.error('Cache set error', {
        url,
        error: error instanceof Error ? error.message : 'Unknown error',
      });
    }
  }
  /**
   * Delete cached content for a URL (no-op if absent).
   */
  static async delete(url: string): Promise<void> {
    const key = getCacheKey(url);
    try {
      await redis.del(key);
      logger.debug('Cache deleted', { url });
    } catch (error) {
      logger.error('Cache delete error', {
        url,
        error: error instanceof Error ? error.message : 'Unknown error',
      });
    }
  }
  /**
   * Check if a cache entry exists for the URL.
   */
  static async exists(url: string): Promise<boolean> {
    const key = getCacheKey(url);
    try {
      const result = await redis.exists(key);
      return result === 1;
    } catch (error) {
      logger.error('Cache exists check error', {
        url,
        error: error instanceof Error ? error.message : 'Unknown error',
      });
      return false;
    }
  }
  /**
   * Get remaining cache TTL in seconds.
   * Follows Redis TTL semantics (negative when missing / no expiry);
   * returns -1 on Redis error.
   */
  static async getTTL(url: string): Promise<number> {
    const key = getCacheKey(url);
    try {
      return await redis.ttl(key);
    } catch (error) {
      logger.error('Cache TTL check error', {
        url,
        error: error instanceof Error ? error.message : 'Unknown error',
      });
      return -1;
    }
  }
  /**
   * Clear all Netflix content cache
   *
   * Uses incremental SCAN instead of KEYS: KEYS is O(keyspace) and blocks
   * the Redis event loop, which is unsafe on production instances.
   */
  static async clearAll(): Promise<void> {
    try {
      let cursor = '0';
      let cleared = 0;
      do {
        const [nextCursor, keys] = await redis.scan(
          cursor,
          'MATCH',
          `${CACHE_PREFIX}*`,
          'COUNT',
          100
        );
        cursor = nextCursor;
        if (keys.length > 0) {
          await redis.del(...keys);
          cleared += keys.length;
        }
      } while (cursor !== '0');
      if (cleared > 0) {
        logger.info('Cache cleared', { count: cleared });
      }
    } catch (error) {
      logger.error('Cache clear error', {
        error: error instanceof Error ? error.message : 'Unknown error',
      });
    }
  }
}
export default CacheService;

View File

@@ -0,0 +1,239 @@
import prisma from '../config/database.js';
import type { ContentData, ScraperResult, GetInfoResponse } from '../types/index.js';
/**
 * Content Service for database operations
 *
 * Persists scraped Netflix content together with its genres and cast,
 * and maps Prisma rows to the API-facing ContentData shape.
 */
export class ContentService {
  /**
   * List content items from database, newest first.
   *
   * @param options.type  Optional filter by content type
   * @param options.limit Optional maximum number of rows (undefined = all)
   */
  static async list(options?: {
    type?: 'movie' | 'tvshow';
    limit?: number;
  }): Promise<ContentData[]> {
    const content = await prisma.content.findMany({
      where: options?.type ? { type: options.type } : undefined,
      include: {
        genres: {
          include: {
            genre: true,
          },
        },
        castMembers: {
          orderBy: { name: 'asc' },
        },
      },
      orderBy: { createdAt: 'desc' },
      take: options?.limit,
    });
    return content.map((item) => this.mapToContentData(item));
  }
  /**
   * Find content by URL.
   *
   * @returns The mapped content, or null when the URL is unknown.
   */
  static async findByUrl(url: string): Promise<ContentData | null> {
    const content = await prisma.content.findUnique({
      where: { url },
      include: {
        genres: {
          include: {
            genre: true,
          },
        },
        castMembers: {
          orderBy: { name: 'asc' },
        },
      },
    });
    if (!content) {
      return null;
    }
    return this.mapToContentData(content);
  }
  /**
   * Upsert every genre name and return join-table connection records
   * for a nested create. Shared by create() and update().
   */
  private static async upsertGenres(genreNames: string[]) {
    return Promise.all(
      genreNames.map(async (genreName) => {
        const genre = await prisma.genre.upsert({
          where: { name: genreName },
          update: {},
          create: { name: genreName },
        });
        return { genreId: genre.id };
      })
    );
  }
  /**
   * Create new content from scraper result.
   */
  static async create(
    url: string,
    scraperResult: ScraperResult
  ): Promise<ContentData> {
    // Create or find genres first; the insert then only needs join rows.
    const genreConnections = await this.upsertGenres(scraperResult.genres);
    // Create content with genres and cast in a single nested write
    const content = await prisma.content.create({
      data: {
        url,
        title: scraperResult.title,
        year: scraperResult.year,
        plot: scraperResult.plot,
        backdropUrl: scraperResult.backdropUrl,
        ageRating: scraperResult.ageRating,
        type: scraperResult.type,
        currentSeason: scraperResult.currentSeason,
        genres: {
          create: genreConnections,
        },
        castMembers: {
          create: scraperResult.cast.map((name) => ({ name })),
        },
      },
      include: {
        genres: {
          include: {
            genre: true,
          },
        },
        castMembers: {
          orderBy: { name: 'asc' },
        },
      },
    });
    return this.mapToContentData(content);
  }
  /**
   * Update existing content.
   *
   * Fix: the previous implementation deleted the genre/cast relations in
   * separate statements before the update, so a mid-sequence failure left
   * the row stripped of its relations. The deleteMany + create pair is now
   * a single atomic nested write inside one update query.
   */
  static async update(
    url: string,
    scraperResult: ScraperResult
  ): Promise<ContentData> {
    const genreConnections = await this.upsertGenres(scraperResult.genres);
    const content = await prisma.content.update({
      where: { url },
      data: {
        title: scraperResult.title,
        year: scraperResult.year,
        plot: scraperResult.plot,
        backdropUrl: scraperResult.backdropUrl,
        ageRating: scraperResult.ageRating,
        type: scraperResult.type,
        currentSeason: scraperResult.currentSeason,
        genres: {
          // Atomically replace all relations: delete every existing join
          // row for this content, then recreate from the scrape result.
          deleteMany: {},
          create: genreConnections,
        },
        castMembers: {
          deleteMany: {},
          create: scraperResult.cast.map((name) => ({ name })),
        },
      },
      include: {
        genres: {
          include: {
            genre: true,
          },
        },
        castMembers: {
          orderBy: { name: 'asc' },
        },
      },
    });
    return this.mapToContentData(content);
  }
  /**
   * Delete content by URL.
   * NOTE(review): throws if the URL does not exist (Prisma delete semantics).
   */
  static async delete(url: string): Promise<void> {
    await prisma.content.delete({
      where: { url },
    });
  }
  /**
   * Map a database row (with included relations) to the ContentData type,
   * flattening genres and cast to plain string arrays.
   */
  private static mapToContentData(content: {
    id: string;
    url: string;
    title: string;
    year: number | null;
    plot: string | null;
    backdropUrl: string | null;
    ageRating: string | null;
    type: string;
    currentSeason: number | null;
    createdAt: Date;
    updatedAt: Date;
    genres: { genre: { name: string } }[];
    castMembers: { name: string }[];
  }): ContentData {
    return {
      id: content.id,
      url: content.url,
      title: content.title,
      year: content.year,
      plot: content.plot,
      backdropUrl: content.backdropUrl,
      ageRating: content.ageRating,
      type: content.type as 'movie' | 'tvshow',
      currentSeason: content.currentSeason,
      genres: content.genres.map((g) => g.genre.name),
      cast: content.castMembers.map((c) => c.name),
      createdAt: content.createdAt,
      updatedAt: content.updatedAt,
    };
  }
  /**
   * Convert ContentData to the public API response format
   * (note: backdropUrl is exposed as 'backdrop').
   */
  static toApiResponse(data: ContentData): GetInfoResponse {
    return {
      title: data.title,
      year: data.year,
      plot: data.plot,
      ageRating: data.ageRating,
      type: data.type,
      currentSeason: data.currentSeason,
      genres: data.genres,
      cast: data.cast,
      backdrop: data.backdropUrl,
    };
  }
}
export default ContentService;

237
src/services/job.service.ts Normal file
View File

@@ -0,0 +1,237 @@
import { v4 as uuidv4 } from 'uuid';
import prisma from '../config/database.js';
import { CacheService } from './cache.service.js';
import { ContentService } from './content.service.js';
import { ScraperService } from './scraper.service.js';
import {
emitJobProgress,
emitJobCompleted,
emitJobError,
} from '../config/socket.js';
import logger from '../utils/logger.js';
import type {
ScrapeJob,
JobStatus,
GetInfoResponse,
DataSource,
ApiError,
} from '../types/index.js';
/**
 * Job Service for async scrape operations
 *
 * Implements the hybrid lookup pipeline (Redis cache -> PostgreSQL ->
 * live Netflix scrape), both asynchronously (create/process, with progress
 * mirrored to Socket.IO clients) and synchronously (processSync).
 */
export class JobService {
  /**
   * Create a new scrape job
   *
   * Persists a 'pending' job row; processing is started separately via
   * process(jobId).
   */
  static async create(url: string): Promise<ScrapeJob> {
    const job = await prisma.scrapeJob.create({
      data: {
        id: uuidv4(),
        url,
        status: 'pending',
        progress: 0,
        step: 'created',
      },
    });
    logger.info('Job created', { jobId: job.id, url });
    return this.mapToScrapeJob(job);
  }
  /**
   * Get job by ID
   *
   * @returns The job, or null when the id is unknown.
   */
  static async getById(jobId: string): Promise<ScrapeJob | null> {
    const job = await prisma.scrapeJob.findUnique({
      where: { id: jobId },
    });
    return job ? this.mapToScrapeJob(job) : null;
  }
  /**
   * Update job status
   *
   * Partial patch: only the provided fields are written.
   * NOTE(review): throws when jobId does not exist (Prisma update semantics).
   */
  static async update(
    jobId: string,
    data: {
      status?: JobStatus;
      progress?: number;
      step?: string;
      result?: unknown;
      error?: string;
    }
  ): Promise<ScrapeJob> {
    const job = await prisma.scrapeJob.update({
      where: { id: jobId },
      data,
    });
    return this.mapToScrapeJob(job);
  }
  /**
   * Process a scrape job (hybrid: cache -> db -> netflix)
   *
   * Progress milestones: 10 cache check, 30 db check, 50 scraping,
   * 80 saving, 100 done. Each milestone is persisted first, then emitted
   * to Socket.IO clients. Failures mark the job 'failed' and emit an error
   * event instead of throwing to the caller.
   */
  static async process(jobId: string): Promise<void> {
    const job = await this.getById(jobId);
    if (!job) {
      logger.error('Job not found', { jobId });
      return;
    }
    try {
      // Update status to processing
      await this.update(jobId, {
        status: 'processing',
        progress: 10,
        step: 'checking_cache',
      });
      emitJobProgress(jobId, 10, 'processing', 'Checking cache');
      // Step 1: Check cache
      const cachedData = await CacheService.get(job.url);
      if (cachedData) {
        await this.completeJob(jobId, cachedData, 'cache');
        return;
      }
      // Update progress
      await this.update(jobId, { progress: 30, step: 'checking_database' });
      emitJobProgress(jobId, 30, 'processing', 'Checking database');
      // Step 2: Check database
      const dbContent = await ContentService.findByUrl(job.url);
      if (dbContent) {
        const responseData = ContentService.toApiResponse(dbContent);
        // Cache the result (re-warm Redis from the database hit)
        await CacheService.set(job.url, responseData);
        await this.completeJob(jobId, responseData, 'database');
        return;
      }
      // Update progress
      await this.update(jobId, { progress: 50, step: 'scraping_netflix' });
      emitJobProgress(jobId, 50, 'processing', 'Scraping Netflix');
      // Step 3: Scrape from Netflix
      const scraperResult = await ScraperService.scrape(job.url);
      // Update progress
      await this.update(jobId, { progress: 80, step: 'saving_to_database' });
      emitJobProgress(jobId, 80, 'processing', 'Saving to database');
      // Step 4: Save to database
      const contentData = await ContentService.create(job.url, scraperResult);
      const responseData = ContentService.toApiResponse(contentData);
      // Step 5: Cache the result
      await CacheService.set(job.url, responseData);
      // Complete the job
      await this.completeJob(jobId, responseData, 'netflix');
    } catch (error) {
      const apiError: ApiError = {
        code: 'SCRAPE_ERROR',
        message: error instanceof Error ? error.message : 'Unknown error occurred',
      };
      // Persist the failure before notifying subscribers.
      await this.update(jobId, {
        status: 'failed',
        error: apiError.message,
      });
      emitJobError(jobId, apiError);
      logger.error('Job failed', {
        jobId,
        error: apiError.message,
      });
    }
  }
  /**
   * Complete a job with result
   *
   * Persists the terminal state, then notifies Socket.IO subscribers with
   * the payload and the data source that served it.
   */
  private static async completeJob(
    jobId: string,
    data: GetInfoResponse,
    source: DataSource
  ): Promise<void> {
    await this.update(jobId, {
      status: 'completed',
      progress: 100,
      step: 'completed',
      result: data,
    });
    emitJobCompleted(jobId, data, source);
    logger.info('Job completed', { jobId, source });
  }
  /**
   * Process job synchronously (for direct API calls)
   *
   * Same cache -> database -> netflix pipeline as process(), but without a
   * job row or socket events; errors propagate to the caller.
   */
  static async processSync(url: string): Promise<{
    data: GetInfoResponse;
    source: DataSource;
  }> {
    // Step 1: Check cache
    const cachedData = await CacheService.get(url);
    if (cachedData) {
      return { data: cachedData, source: 'cache' };
    }
    // Step 2: Check database
    const dbContent = await ContentService.findByUrl(url);
    if (dbContent) {
      const responseData = ContentService.toApiResponse(dbContent);
      await CacheService.set(url, responseData);
      return { data: responseData, source: 'database' };
    }
    // Step 3: Scrape from Netflix
    const scraperResult = await ScraperService.scrape(url);
    // Step 4: Save to database
    const contentData = await ContentService.create(url, scraperResult);
    const responseData = ContentService.toApiResponse(contentData);
    // Step 5: Cache the result
    await CacheService.set(url, responseData);
    return { data: responseData, source: 'netflix' };
  }
  /**
   * Map database result to ScrapeJob type
   *
   * Normalizes a null step to '' and wraps a stored error string in an
   * ApiError with code 'JOB_ERROR'.
   */
  private static mapToScrapeJob(job: {
    id: string;
    url: string;
    status: string;
    progress: number;
    step: string | null;
    result: unknown;
    error: string | null;
    createdAt: Date;
    updatedAt: Date;
  }): ScrapeJob {
    return {
      id: job.id,
      url: job.url,
      status: job.status as JobStatus,
      progress: job.progress,
      step: job.step || '',
      result: job.result as ScrapeJob['result'],
      error: job.error ? { code: 'JOB_ERROR', message: job.error } : undefined,
      createdAt: job.createdAt,
      updatedAt: job.updatedAt,
    };
  }
}
export default JobService;

View File

@@ -0,0 +1,284 @@
import * as cheerio from 'cheerio';
import type { ScraperResult, ContentType } from '../types/index.js';
import logger from '../utils/logger.js';
/**
 * Age rating patterns to detect and exclude from genres
 * Supports various formats including Unicode bidirectional text characters
 * Unicode chars: \u2066-\u2069 (isolate), \u202A-\u202E (embedding), \u200E-\u200F (marks)
 *
 * Anchored (^...$) so a genre such as "Romantic" can never be mistaken for
 * the single-letter rating "R". Matches e.g. "18+", "PG-13", "TV-MA",
 * "TV-Y7-FV", case-insensitively.
 */
const AGE_RATING_PATTERN = /^[\u2066-\u2069\u202A-\u202E\u200E-\u200F]*(\d+\+|PG-?13|PG|NC-?17|R|G|TV-?MA|TV-?14|TV-?PG|TV-?G|TV-?Y7?-?FV?|TV-?Y)[\u2066-\u2069\u202A-\u202E\u200E-\u200F]*$/i;
/**
 * Season pattern to detect TV shows and extract season number
 * Matches patterns like "3 Sezon", "2 Seasons", "1. Sezon", etc.
 * Capture group 1 carries the season count; labels cover Turkish and
 * English, singular and plural.
 */
const SEASON_PATTERN = /(\d+)\.?\s*(sezon|season|sezonlar|seasons)/i;
/**
 * Netflix HTML Scraper Service
 * Uses Cheerio for parsing HTML content
 *
 * NOTE(review): the CSS selectors below target build-hashed Netflix class
 * names (e.g. "default-ltr-iqcdef-cache-…"); they will break whenever
 * Netflix redeploys, so the meta-tag fallbacks matter.
 */
export class ScraperService {
  /**
   * Validate if URL is a valid Netflix URL (netflix.com / netflix.com.tr).
   */
  static isValidNetflixUrl(url: string): boolean {
    try {
      const parsedUrl = new URL(url);
      const validHosts = [
        'www.netflix.com',
        'netflix.com',
        'www.netflix.com.tr',
        'netflix.com.tr',
      ];
      return validHosts.includes(parsedUrl.hostname);
    } catch {
      // new URL() throws on malformed input.
      return false;
    }
  }
  /**
   * Extract Netflix title ID from URL
   */
  static extractTitleId(url: string): string | null {
    const match = url.match(/\/title\/(\d+)/);
    return match ? match[1] : null;
  }
  /**
   * Strip Unicode bidirectional control characters that Netflix wraps
   * around metadata strings, then trim.
   *
   * Fix: previously the genre extraction used its own character list that
   * omitted U+2067 (RLI) and U+2068 (FSI), unlike the age-rating path, so
   * a rating wrapped in those isolates could leak into the genre list.
   * Both paths now share this helper with the full ranges:
   * \u2066-\u2069 (isolates), \u202A-\u202E (embeddings), \u200E-\u200F (marks).
   */
  private static cleanBidiText(text: string): string {
    return text.replace(/[\u2066-\u2069\u202A-\u202E\u200E-\u200F]/g, '').trim();
  }
  /**
   * Fetch HTML content from Netflix URL
   *
   * Sends a desktop browser User-Agent and a Turkish-first Accept-Language
   * so the localized (tr-TR) page variant is returned.
   * @throws Error on a non-2xx response
   */
  private static async fetchHtml(url: string): Promise<string> {
    logger.info('Fetching Netflix page', { url });
    const response = await fetch(url, {
      headers: {
        'User-Agent':
          'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/120.0.0.0 Safari/537.36',
        'Accept-Language': 'tr-TR,tr;q=0.9,en-US;q=0.8,en;q=0.7',
        Accept:
          'text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,*/*;q=0.8',
      },
    });
    if (!response.ok) {
      throw new Error(`Failed to fetch Netflix page: ${response.status}`);
    }
    return response.text();
  }
  /**
   * Parse HTML and extract content data
   *
   * @throws Error on an invalid Netflix URL or a failed fetch
   */
  static async scrape(url: string): Promise<ScraperResult> {
    if (!this.isValidNetflixUrl(url)) {
      throw new Error('Invalid Netflix URL');
    }
    const html = await this.fetchHtml(url);
    const $ = cheerio.load(html);
    const title = this.extractTitle($);
    const year = this.extractYear($);
    const plot = this.extractPlot($);
    const ageRating = this.extractAgeRating($);
    const { genres, type, currentSeason } = this.extractGenresTypeAndSeason($);
    const cast = this.extractCast($);
    const backdropUrl = this.extractBackdrop($);
    const result: ScraperResult = {
      title,
      year,
      plot,
      ageRating,
      type,
      genres,
      cast,
      backdropUrl,
      currentSeason,
    };
    logger.info('Scraping completed', {
      url,
      title,
      year,
      ageRating,
      type,
      genresCount: genres.length,
      castCount: cast.length,
    });
    return result;
  }
  /**
   * Extract title from HTML
   * Fallback chain: hashed h2 class -> og:title meta -> <title> tag.
   */
  private static extractTitle($: cheerio.CheerioAPI): string {
    let title = $('h2.default-ltr-iqcdef-cache-tnklrp').first().text().trim();
    if (!title) {
      title = $('meta[property="og:title"]').attr('content') || '';
    }
    if (!title) {
      const pageTitle = $('title').text();
      title = pageTitle.replace(' | Netflix', '').trim();
    }
    return title || 'Unknown Title';
  }
  /**
   * Extract year from HTML (first li element)
   * Sanity-checked to 1900..currentYear+5; otherwise null.
   */
  private static extractYear($: cheerio.CheerioAPI): number | null {
    const yearText = $('li.default-ltr-iqcdef-cache-6prs41').first().text().trim();
    const year = parseInt(yearText, 10);
    if (!isNaN(year) && year >= 1900 && year <= new Date().getFullYear() + 5) {
      return year;
    }
    return null;
  }
  /**
   * Extract plot/description from HTML, falling back to og:description.
   */
  private static extractPlot($: cheerio.CheerioAPI): string | null {
    const plot = $('span.default-ltr-iqcdef-cache-6ukeej').first().text().trim();
    if (!plot) {
      const metaDesc = $('meta[property="og:description"]').attr('content');
      return metaDesc || null;
    }
    return plot || null;
  }
  /**
   * Extract age rating from HTML (e.g., "18+", "16+")
   * Searches all li elements (except first which is year)
   */
  private static extractAgeRating($: cheerio.CheerioAPI): string | null {
    let ageRating: string | null = null;
    const foundTexts: string[] = [];
    $('li.default-ltr-iqcdef-cache-6prs41').each((index, element) => {
      if (index === 0) return; // Skip year
      const text = $(element).text().trim();
      foundTexts.push(text);
      // Clean Unicode bidi characters before matching
      const cleanText = this.cleanBidiText(text);
      if (cleanText && AGE_RATING_PATTERN.test(cleanText)) {
        ageRating = cleanText;
        return false; // Returning false breaks cheerio's .each loop
      }
    });
    // Debug logging
    if (!ageRating && foundTexts.length > 0) {
      logger.debug('Age rating not found in elements', {
        foundTexts,
        pattern: AGE_RATING_PATTERN.source,
      });
    }
    return ageRating;
  }
  /**
   * Extract genres from HTML (skip year, age rating, and season info)
   * Also detects content type (movie/tvshow) based on season presence
   * Extracts current season number from season text
   */
  private static extractGenresTypeAndSeason($: cheerio.CheerioAPI): { genres: string[]; type: ContentType; currentSeason: number | null } {
    const genres: string[] = [];
    let type: ContentType = 'movie';
    let currentSeason: number | null = null;
    const foundTexts: string[] = [];
    $('li.default-ltr-iqcdef-cache-6prs41').each((index, element) => {
      if (index === 0) return; // Skip year
      const text = $(element).text().trim();
      // Shared bidi cleanup (same character set as extractAgeRating).
      const cleanText = this.cleanBidiText(text);
      foundTexts.push(cleanText);
      // Check for season pattern - indicates TV show
      const seasonMatch = cleanText.match(SEASON_PATTERN);
      if (cleanText && seasonMatch) {
        type = 'tvshow';
        // Extract season number from the text
        const seasonNum = parseInt(seasonMatch[1], 10);
        if (!isNaN(seasonNum)) {
          currentSeason = seasonNum;
        }
        return; // Skip adding to genres
      }
      // Skip age rating - only add actual genres
      if (cleanText && !AGE_RATING_PATTERN.test(cleanText)) {
        genres.push(cleanText);
      }
    });
    // Debug logging
    logger.debug('extractGenresTypeAndSeason completed', {
      foundTexts,
      genres,
      type,
      currentSeason,
    });
    return { genres, type, currentSeason };
  }
  /**
   * Extract cast members from HTML (comma-separated span text).
   */
  private static extractCast($: cheerio.CheerioAPI): string[] {
    const castText = $('span.default-ltr-iqcdef-cache-m0886o').first().text().trim();
    if (!castText) {
      return [];
    }
    return castText
      .split(',')
      .map((name) => name.trim())
      .filter((name) => name.length > 0);
  }
  /**
   * Extract backdrop image URL from HTML
   * Prefers the last srcset candidate (largest by convention) over src.
   */
  private static extractBackdrop($: cheerio.CheerioAPI): string | null {
    const backdropDiv = $('div.default-ltr-iqcdef-cache-1wezh7a').first();
    const img = backdropDiv.find('img').first();
    const srcset = img.attr('srcset');
    if (srcset) {
      const sources = srcset.split(',');
      const lastSource = sources[sources.length - 1]?.trim().split(' ')[0];
      if (lastSource) {
        return lastSource;
      }
    }
    const src = img.attr('src');
    if (src) {
      return src;
    }
    return null;
  }
}
export default ScraperService;

View File

@@ -0,0 +1,429 @@
import { env } from '../config/env.js';
import type {
TmdbSearchRequest,
TmdbSearchResult,
TmdbSearchResponse,
TmdbRawResponse,
TmdbRawMovie,
TmdbRawTv,
} from '../types/index.js';
import logger from '../utils/logger.js';
/**
 * TMDB Genre ID to Name mapping
 * Common genres used in movies and TV shows
 *
 * NOTE(review): hard-coding this avoids an extra /genre/list API call, but
 * the map can drift from TMDB's canonical list — verify periodically.
 * Ids 10759 and above are TV-specific genres.
 */
const GENRE_MAP: Record<number, string> = {
  28: 'Action',
  12: 'Adventure',
  16: 'Animation',
  35: 'Comedy',
  80: 'Crime',
  99: 'Documentary',
  18: 'Drama',
  10751: 'Family',
  14: 'Fantasy',
  36: 'History',
  27: 'Horror',
  10402: 'Music',
  9648: 'Mystery',
  10749: 'Romance',
  878: 'Science Fiction',
  10770: 'TV Movie',
  53: 'Thriller',
  10752: 'War',
  37: 'Western',
  10759: 'Action & Adventure',
  10762: 'Kids',
  10763: 'News',
  10764: 'Reality',
  10765: 'Sci-Fi & Fantasy',
  10766: 'Soap',
  10767: 'Talk',
  10768: 'War & Politics',
};
/**
 * TMDB API Base URL (v3 REST API)
 */
const TMDB_BASE_URL = 'https://api.themoviedb.org/3';
/**
 * TMDB Image Base URL (original-resolution variant)
 */
const TMDB_IMAGE_BASE_URL = 'https://image.tmdb.org/t/p/original';
/**
 * TMDB Service for movie/TV show search
 *
 * Thin wrapper over the TMDB v3 REST API using bearer-token auth; all
 * requests ask for the Turkish locale (language=tr-TR).
 */
export class TmdbService {
  /**
   * Get common headers for TMDB API requests (bearer token auth).
   */
  private static getHeaders(): Record<string, string> {
    return {
      Authorization: `Bearer ${env.TMDB_ACCESS_TOKEN}`,
      'Content-Type': 'application/json',
    };
  }
  /**
   * Perform a GET against a TMDB search endpoint and return the raw page.
   * Shared by searchMovies/searchTv/searchMulti.
   * @throws Error carrying the HTTP status on a non-2xx response
   */
  private static async fetchSearchPage(url: string): Promise<TmdbRawResponse> {
    const response = await fetch(url, {
      method: 'GET',
      headers: this.getHeaders(),
    });
    if (!response.ok) {
      const errorText = await response.text();
      logger.error('TMDB API error', { status: response.status, error: errorText });
      throw new Error(`TMDB API error: ${response.status}`);
    }
    return (await response.json()) as TmdbRawResponse;
  }
  /**
   * Extract the year from an ISO date string ("YYYY-MM-DD").
   */
  private static extractYear(dateStr: string | null): number | null {
    if (!dateStr) return null;
    const year = parseInt(dateStr.split('-')[0] || '0', 10);
    return isNaN(year) ? null : year;
  }
  /**
   * Convert genre IDs to genre names; unknown ids are dropped.
   */
  private static mapGenreIds(genreIds: number[]): string[] {
    return genreIds
      .map((id) => GENRE_MAP[id])
      .filter((name): name is string => name !== undefined);
  }
  /**
   * Build full image URL from a TMDB image path.
   */
  private static buildImageUrl(path: string | null): string | null {
    if (!path) return null;
    return `${TMDB_IMAGE_BASE_URL}${path}`;
  }
  /**
   * Normalize raw movie result to TmdbSearchResult
   */
  private static normalizeMovie(movie: TmdbRawMovie): TmdbSearchResult {
    return {
      id: movie.id,
      title: movie.title,
      originalTitle: movie.original_title,
      overview: movie.overview,
      releaseDate: movie.release_date || null,
      year: this.extractYear(movie.release_date),
      type: 'movie',
      posterPath: this.buildImageUrl(movie.poster_path),
      backdropPath: this.buildImageUrl(movie.backdrop_path),
      voteAverage: movie.vote_average,
      voteCount: movie.vote_count,
      popularity: movie.popularity,
      genres: this.mapGenreIds(movie.genre_ids),
      originalLanguage: movie.original_language,
    };
  }
  /**
   * Normalize raw TV result to TmdbSearchResult
   * Season fields start null; they are filled in only when season
   * filtering runs (filterAndEnrichTvResultsBySeason).
   */
  private static normalizeTv(tv: TmdbRawTv): TmdbSearchResult {
    return {
      id: tv.id,
      title: tv.name,
      originalTitle: tv.original_name,
      overview: tv.overview,
      releaseDate: tv.first_air_date || null,
      year: this.extractYear(tv.first_air_date),
      type: 'tv',
      posterPath: this.buildImageUrl(tv.poster_path),
      backdropPath: this.buildImageUrl(tv.backdrop_path),
      voteAverage: tv.vote_average,
      voteCount: tv.vote_count,
      popularity: tv.popularity,
      genres: this.mapGenreIds(tv.genre_ids),
      originalLanguage: tv.original_language,
      currentSeason: null,
      totalSeasons: null,
    };
  }
  /**
   * Get TV show details including season count.
   * Returns null on any HTTP or network failure (best-effort lookup).
   */
  private static async getTvDetails(tvId: number): Promise<{ numberOfSeasons: number } | null> {
    const url = `${TMDB_BASE_URL}/tv/${tvId}?language=tr-TR`;
    try {
      const response = await fetch(url, {
        method: 'GET',
        headers: this.getHeaders(),
      });
      if (!response.ok) {
        return null;
      }
      const data = await response.json();
      return {
        numberOfSeasons: data.number_of_seasons || 0,
      };
    } catch {
      return null;
    }
  }
  /**
   * Get specific season details including air date.
   * Returns null on any HTTP or network failure (best-effort lookup).
   */
  private static async getSeasonDetails(
    tvId: number,
    seasonNumber: number
  ): Promise<{ airDate: string | null; year: number | null } | null> {
    const url = `${TMDB_BASE_URL}/tv/${tvId}/season/${seasonNumber}?language=tr-TR`;
    try {
      const response = await fetch(url, {
        method: 'GET',
        headers: this.getHeaders(),
      });
      if (!response.ok) {
        return null;
      }
      const data = await response.json();
      const airDate = data.air_date || null;
      const year = airDate ? this.extractYear(airDate) : null;
      return { airDate, year };
    } catch {
      return null;
    }
  }
  /**
   * Filter and enrich TV results based on season criteria
   * Only returns shows that match the season requirements; matching shows
   * get totalSeasons/currentSeason populated.
   */
  private static async filterAndEnrichTvResultsBySeason(
    results: TmdbSearchResult[],
    seasonNumber: number,
    seasonYear?: number
  ): Promise<TmdbSearchResult[]> {
    const enrichedResults: TmdbSearchResult[] = [];
    // Process results sequentially to avoid TMDB rate limiting.
    for (const result of results) {
      if (result.type !== 'tv') continue;
      // Get TV details (silently skip shows whose details cannot be fetched)
      const tvDetails = await this.getTvDetails(result.id);
      if (!tvDetails) continue;
      // Check if show has enough seasons
      if (tvDetails.numberOfSeasons < seasonNumber) {
        logger.debug('TV show filtered out - not enough seasons', {
          title: result.title,
          totalSeasons: tvDetails.numberOfSeasons,
          requestedSeason: seasonNumber,
        });
        continue;
      }
      // If seasonYear is provided, check if the season's air year matches
      if (seasonYear) {
        const seasonDetails = await this.getSeasonDetails(result.id, seasonNumber);
        if (!seasonDetails || seasonDetails.year !== seasonYear) {
          logger.debug('TV show filtered out - season year mismatch', {
            title: result.title,
            requestedSeason: seasonNumber,
            requestedYear: seasonYear,
            actualYear: seasonDetails?.year,
          });
          continue;
        }
      }
      // Show matches all criteria - add to results
      enrichedResults.push({
        ...result,
        totalSeasons: tvDetails.numberOfSeasons,
        currentSeason: seasonNumber,
      });
    }
    return enrichedResults;
  }
  /**
   * Normalize raw result based on media type.
   * Falls back to shape detection ('title' only exists on movies) when
   * media_type is absent; unknown media types yield null.
   */
  private static normalizeResult(result: TmdbRawMovie | TmdbRawTv): TmdbSearchResult | null {
    const mediaType = result.media_type || ('title' in result ? 'movie' : 'tv');
    if (mediaType === 'movie') {
      return this.normalizeMovie(result as TmdbRawMovie);
    } else if (mediaType === 'tv') {
      return this.normalizeTv(result as TmdbRawTv);
    }
    return null;
  }
  /**
   * Search for movies
   * @param query Search query
   * @param year Optional release-year filter (native /search/movie param)
   */
  static async searchMovies(query: string, year?: number): Promise<TmdbSearchResponse> {
    const params = new URLSearchParams({
      query,
      language: 'tr-TR',
    });
    if (year) {
      // /search/movie supports a 'year' parameter natively.
      params.append('year', year.toString());
    }
    logger.info('TMDB: Searching movies', { query, year });
    const data = await this.fetchSearchPage(
      `${TMDB_BASE_URL}/search/movie?${params.toString()}`
    );
    const results = data.results.map((r) => this.normalizeMovie(r as TmdbRawMovie));
    return {
      page: data.page,
      results,
      totalPages: data.total_pages,
      totalResults: data.total_results,
    };
  }
  /**
   * Search for TV shows
   * @param query Search query
   * @param year First air date year (optional - not recommended for accurate results)
   * @param seasonNumber Required season number - only shows with this season will be returned
   * @param seasonYear Required season year - only shows with matching season air year will be returned
   */
  static async searchTv(
    query: string,
    year?: number,
    seasonNumber?: number,
    seasonYear?: number
  ): Promise<TmdbSearchResponse> {
    const params = new URLSearchParams({
      query,
      language: 'tr-TR',
    });
    // Note: We don't use year for TV searches when seasonNumber is provided
    // because the year from Netflix is the season's year, not the show's first air year
    if (year && !seasonNumber) {
      params.append('first_air_date_year', year.toString());
    }
    logger.info('TMDB: Searching TV shows', { query, year, seasonNumber, seasonYear });
    const data = await this.fetchSearchPage(
      `${TMDB_BASE_URL}/search/tv?${params.toString()}`
    );
    let results = data.results.map((r) => this.normalizeTv(r as TmdbRawTv));
    // Filter and enrich results based on season criteria
    if (seasonNumber !== undefined) {
      results = await this.filterAndEnrichTvResultsBySeason(results, seasonNumber, seasonYear);
    }
    return {
      page: data.page,
      results,
      totalPages: data.total_pages,
      totalResults: results.length, // Update total to reflect filtered count
    };
  }
  /**
   * Multi search (movies and TV shows; person results are dropped)
   *
   * Fix: TMDB's /search/multi endpoint has no year parameter (it accepts
   * only query/language/page/include_adult), so the previous 'year' query
   * param was silently ignored. The year filter is now applied client-side
   * on the normalized results' release/first-air year.
   */
  static async searchMulti(query: string, year?: number): Promise<TmdbSearchResponse> {
    const params = new URLSearchParams({
      query,
      language: 'tr-TR',
    });
    logger.info('TMDB: Multi search', { query, year });
    const data = await this.fetchSearchPage(
      `${TMDB_BASE_URL}/search/multi?${params.toString()}`
    );
    // Filter out person results and normalize
    let results = data.results
      .filter((r) => r.media_type !== 'person')
      .map((r) => this.normalizeResult(r))
      .filter((r): r is TmdbSearchResult => r !== null);
    if (year !== undefined) {
      results = results.filter((r) => r.year === year);
    }
    return {
      page: data.page,
      results,
      totalPages: data.total_pages,
      // Reflect the filtered count when the client-side year filter ran.
      totalResults: year !== undefined ? results.length : data.total_results,
    };
  }
  /**
   * Search for content based on type
   * @param request Search request with query, year, type, and optional season parameters
   */
  static async search(request: TmdbSearchRequest): Promise<TmdbSearchResponse> {
    const { query, year, type = 'multi', seasonYear, seasonNumber } = request;
    switch (type) {
      case 'movie':
        return this.searchMovies(query, year);
      case 'tv':
        // For TV shows, use season parameters if provided
        return this.searchTv(query, year, seasonNumber, seasonYear);
      case 'multi':
      default:
        return this.searchMulti(query, year);
    }
  }
}
export default TmdbService;

210
src/types/index.ts Normal file
View File

@@ -0,0 +1,210 @@
/**
* Type definitions for Netflix Scraper API
*/
// ============================================
// Content Types
// ============================================
/** Canonical representation of a scraped title as persisted and served by the API. */
export interface ContentData {
  id: string;
  url: string; // original content URL used as the scrape target
  title: string;
  year: number | null; // null when the release year could not be determined
  plot: string | null;
  backdropUrl: string | null;
  ageRating: string | null;
  type: 'movie' | 'tvshow';
  currentSeason: number | null; // only meaningful for type === 'tvshow'
  genres: string[];
  cast: string[];
  createdAt: Date;
  updatedAt: Date;
}
/** Discriminator for the two supported content categories. */
export type ContentType = 'movie' | 'tvshow';
/** Raw result produced by the scraper before persistence (no id/url/timestamps). */
export interface ScraperResult {
  title: string;
  year: number | null;
  plot: string | null;
  ageRating: string | null;
  type: ContentType;
  genres: string[];
  cast: string[];
  backdropUrl: string | null;
  currentSeason: number | null;
}
// ============================================
// API Types
// ============================================
/** Standard envelope for every API response; exactly one of data/error is expected. */
export interface ApiResponse<T> {
  success: boolean;
  data?: T; // present when success === true
  error?: ApiError; // present when success === false
}
/** Machine-readable error payload returned inside ApiResponse. */
export interface ApiError {
  code: string; // stable error code for client-side handling
  message: string; // human-readable description
  details?: Record<string, unknown>;
}
/** Body of the get-info endpoint request. */
export interface GetInfoRequest {
  url: string;
}
/** Payload returned by the get-info endpoint (ScraperResult with `backdrop` naming). */
export interface GetInfoResponse {
  title: string;
  year: number | null;
  plot: string | null;
  ageRating: string | null;
  type: ContentType;
  genres: string[];
  cast: string[];
  backdrop: string | null; // NOTE: named `backdrop` here vs `backdropUrl` elsewhere
  currentSeason: number | null;
}
// ============================================
// Cache Types
// ============================================
/** Wrapper stored in the cache layer alongside bookkeeping metadata. */
export interface CacheEntry<T> {
  data: T;
  cachedAt: number; // epoch milliseconds at write time — TODO confirm unit against cache service
  ttl: number; // time-to-live in seconds — TODO confirm unit against cache service
}
/** Where a served result originated from. */
export type DataSource = 'cache' | 'database' | 'netflix';
// ============================================
// Socket Event Types
// ============================================
/** Typed map of socket.io events exchanged for scrape-job tracking. */
export interface SocketEvents {
  // Client -> Server
  'job:subscribe': (jobId: string) => void;
  'job:unsubscribe': (jobId: string) => void;
  // Server -> Client
  'job:progress': (data: JobProgress) => void;
  'job:completed': (data: JobCompleted) => void;
  'job:error': (data: JobError) => void;
}
/** Incremental progress update pushed while a scrape job runs. */
export interface JobProgress {
  jobId: string;
  progress: number; // 0-100
  status: string;
  step: string; // human-readable description of the current stage
}
/** Final success payload for a scrape job. */
export interface JobCompleted {
  jobId: string;
  data: GetInfoResponse;
  source: DataSource;
}
/** Final failure payload for a scrape job. */
export interface JobError {
  jobId: string;
  error: ApiError;
}
// ============================================
// Job Types
// ============================================
/** Lifecycle states of a scrape job. */
export type JobStatus = 'pending' | 'processing' | 'completed' | 'failed';
/** Server-side record of a scrape job; result/error are set on terminal states. */
export interface ScrapeJob {
  id: string;
  url: string;
  status: JobStatus;
  progress: number; // 0-100, mirrors JobProgress.progress
  step: string;
  result?: ScraperResult; // set when status === 'completed'
  error?: ApiError; // set when status === 'failed'
  createdAt: Date;
  updatedAt: Date;
}
// ============================================
// TMDB API Types
// ============================================
/** Parameters accepted by the TMDB search service. */
export interface TmdbSearchRequest {
  query: string;
  year?: number;
  type?: 'movie' | 'tv' | 'multi'; // defaults to 'multi' in the service layer
  seasonYear?: number; // TV only: filter seasons by air year
  seasonNumber?: number; // TV only: require this season to exist
}
/** Normalized (camelCase) TMDB search hit served to clients. */
export interface TmdbSearchResult {
  id: number;
  title: string;
  originalTitle: string;
  overview: string | null;
  releaseDate: string | null; // ISO date string as returned by TMDB
  year: number | null;
  type: 'movie' | 'tv';
  posterPath: string | null;
  backdropPath: string | null;
  voteAverage: number;
  voteCount: number;
  popularity: number;
  genres: string[];
  originalLanguage: string;
  currentSeason?: number | null; // TV only, populated when season enrichment ran
  totalSeasons?: number | null; // TV only, populated when season enrichment ran
}
/** Paginated, normalized TMDB search response. */
export interface TmdbSearchResponse {
  page: number;
  results: TmdbSearchResult[];
  totalPages: number;
  totalResults: number; // may be TMDB's全-pages count or a filtered count depending on endpoint
}
// Raw TMDB API Response Types
/** Movie result exactly as returned by the TMDB REST API (snake_case). */
export interface TmdbRawMovie {
  id: number;
  title: string;
  original_title: string;
  overview: string | null;
  release_date: string;
  poster_path: string | null;
  backdrop_path: string | null;
  vote_average: number;
  vote_count: number;
  popularity: number;
  genre_ids: number[];
  original_language: string;
  media_type?: 'movie'; // only present on /search/multi responses
}
/** TV result exactly as returned by the TMDB REST API (snake_case). */
export interface TmdbRawTv {
  id: number;
  name: string;
  original_name: string;
  overview: string | null;
  first_air_date: string;
  poster_path: string | null;
  backdrop_path: string | null;
  vote_average: number;
  vote_count: number;
  popularity: number;
  genre_ids: number[];
  original_language: string;
  media_type?: 'tv'; // only present on /search/multi responses
}
/** Paginated raw TMDB response wrapper. */
export interface TmdbRawResponse {
  page: number;
  results: (TmdbRawMovie | TmdbRawTv)[];
  total_pages: number;
  total_results: number;
}

97
src/utils/logger.ts Normal file
View File

@@ -0,0 +1,97 @@
/**
* Structured JSON Logger
* Standardized log levels: debug, info, warn, error
*/
/** Supported log severities, in ascending order of importance. */
type LogLevel = 'debug' | 'info' | 'warn' | 'error';

/** Shape of one emitted JSON log line. */
interface LogEntry {
  timestamp: string; // ISO-8601
  level: LogLevel;
  message: string;
  service: string;
  traceId?: string;
  [key: string]: unknown; // arbitrary structured context merged in by callers
}

/**
 * Minimal structured JSON logger.
 *
 * Emits one JSON object per line: errors to stderr, everything else to
 * stdout. The minimum level is taken from the LOG_LEVEL environment
 * variable (debug | info | warn | error), defaulting to 'info'.
 */
class Logger {
  private service: string;
  private level: LogLevel;

  // Numeric severity ranking used by shouldLog().
  private levels: Record<LogLevel, number> = {
    debug: 0,
    info: 1,
    warn: 2,
    error: 3,
  };

  constructor(service: string = 'netflix-scraper-api') {
    this.service = service;
    const envLevel = process.env.LOG_LEVEL;
    // BUGFIX: validate LOG_LEVEL. Previously it was blindly cast to LogLevel,
    // so an unrecognized value (e.g. "INFO" or a typo) made shouldLog()
    // compare against `undefined` and silently suppressed ALL output.
    // Unknown or unset values now fall back to 'info'.
    this.level = envLevel && envLevel in this.levels ? (envLevel as LogLevel) : 'info';
  }

  /** True when `level` is at or above the configured minimum level. */
  private shouldLog(level: LogLevel): boolean {
    return this.levels[level] >= this.levels[this.level];
  }

  /** Builds the structured entry, merging optional context fields into it. */
  private formatEntry(level: LogLevel, message: string, data?: Record<string, unknown>): LogEntry {
    const entry: LogEntry = {
      timestamp: new Date().toISOString(),
      level,
      message,
      service: this.service,
    };
    if (data) {
      Object.assign(entry, data);
    }
    return entry;
  }

  /** Serializes and writes one entry: errors to stderr, the rest to stdout. */
  private output(entry: LogEntry): void {
    const output = JSON.stringify(entry);
    if (entry.level === 'error') {
      process.stderr.write(output + '\n');
    } else {
      process.stdout.write(output + '\n');
    }
  }

  debug(message: string, data?: Record<string, unknown>): void {
    if (this.shouldLog('debug')) {
      this.output(this.formatEntry('debug', message, data));
    }
  }

  info(message: string, data?: Record<string, unknown>): void {
    if (this.shouldLog('info')) {
      this.output(this.formatEntry('info', message, data));
    }
  }

  warn(message: string, data?: Record<string, unknown>): void {
    if (this.shouldLog('warn')) {
      this.output(this.formatEntry('warn', message, data));
    }
  }

  error(message: string, data?: Record<string, unknown>): void {
    if (this.shouldLog('error')) {
      this.output(this.formatEntry('error', message, data));
    }
  }

  /**
   * Returns a child logger that merges `context` into every entry.
   * Caller-supplied fields win over context fields on key collision.
   */
  withContext(context: Record<string, unknown>): Logger {
    const childLogger = new Logger(this.service);
    const parentLog = this.formatEntry.bind(this);
    // Wrap (not replace) the parent's formatter so context is injected once.
    childLogger.formatEntry = (level: LogLevel, message: string, data?: Record<string, unknown>) => {
      return parentLog(level, message, { ...context, ...data });
    };
    return childLogger;
  }
}

/** Shared default logger instance for the whole service. */
export const logger = new Logger();
export default logger;

27
tsconfig.json Normal file
View File

@@ -0,0 +1,27 @@
{
"compilerOptions": {
"target": "ES2022",
"module": "NodeNext",
"moduleResolution": "NodeNext",
"lib": ["ES2022"],
"outDir": "./dist",
"rootDir": "./src",
"strict": true,
"esModuleInterop": true,
"skipLibCheck": true,
"forceConsistentCasingInFileNames": true,
"resolveJsonModule": true,
"declaration": true,
"declarationMap": true,
"sourceMap": true,
"incremental": true,
"noImplicitAny": true,
"noImplicitReturns": true,
"noUnusedLocals": true,
"noUnusedParameters": true,
"exactOptionalPropertyTypes": false,
"noUncheckedIndexedAccess": true
},
"include": ["src/**/*"],
"exclude": ["node_modules", "dist"]
}

1
tsconfig.tsbuildinfo Normal file

File diff suppressed because one or more lines are too long