feat(backend): dosya sistemi tabanlı veri kalıcılığı ekle

Deployment ve job verilerinin dosya sisteminde JSON formatında saklanması
ve uygulama başladığında bu verilerin otomatik olarak yüklenmesi özelliği
eklendi.

- Deployment ve job metadata'ları dosya sisteminde saklanır
- Run geçmişi dosya sisteminde JSON olarak tutulur
- Uygulama başlangıcında dosya sistemi taranır ve eksik veriler yüklenir
- Git'ten repo URL ve branch bilgileri çıkarılabilir
- Commit mesajları normalize edilir
- Ayarlar (webhook token/secret) dosya sisteminde saklanır
This commit is contained in:
2026-01-31 07:17:27 +00:00
parent 535b5cbdc2
commit 003ddfcbd1
9 changed files with 622 additions and 22 deletions

View File

@@ -126,7 +126,10 @@ async function start() {
try {
await mongoose.connect(config.mongoUri);
console.log("MongoDB'ye bağlanıldı");
await jobService.bootstrapFromFilesystem();
await jobService.bootstrap();
await deploymentService.normalizeExistingCommitMessages();
await deploymentService.bootstrapFromFilesystem();
server.listen(config.port, () => {
console.log(`Sunucu ${config.port} portunda çalışıyor`);

View File

@@ -124,7 +124,11 @@ router.get("/metrics/summary", async (req, res) => {
router.get("/", async (_req, res) => {
authMiddleware(_req, res, async () => {
const projects = await DeploymentProject.find().sort({ createdAt: -1 }).lean();
let projects = await DeploymentProject.find().sort({ createdAt: -1 }).lean();
if (projects.length === 0) {
await deploymentService.bootstrapFromFilesystem();
projects = await DeploymentProject.find().sort({ createdAt: -1 }).lean();
}
return res.json(projects);
});
});
@@ -216,7 +220,7 @@ router.post("/:id/run", async (req, res) => {
const project = await DeploymentProject.findById(id);
if (!project) return res.status(404).json({ message: "Deployment bulunamadı" });
const rawMessage = typeof req.body?.message === "string" ? req.body.message.trim() : "";
const message = rawMessage || "Elle deploy tetikleme";
const message = rawMessage || "manual deploy trigger";
deploymentService
.runDeployment(id, { message })
.catch(() => undefined);

View File

@@ -9,7 +9,11 @@ const router = Router();
router.use(authMiddleware);
router.get("/", async (_req, res) => {
const jobs = await Job.find().sort({ createdAt: -1 }).lean();
let jobs = await Job.find().sort({ createdAt: -1 }).lean();
if (jobs.length === 0) {
await jobService.bootstrapFromFilesystem();
jobs = await Job.find().sort({ createdAt: -1 }).lean();
}
const counts = await JobRun.aggregate([
{ $group: { _id: "$job", runCount: { $sum: 1 } } }
]);
@@ -87,6 +91,7 @@ router.post("/", async (req, res) => {
}
try {
const job = await Job.create({ name, repoUrl, testCommand, checkValue, checkUnit });
await jobService.persistMetadata(job);
jobService.scheduleJob(job);
// Yeni job oluşturulduğunda ilk test otomatik tetiklensin
jobService.runJob(job._id.toString()).catch(() => undefined);
@@ -106,6 +111,7 @@ router.put("/:id", async (req, res) => {
{ new: true, runValidators: true }
);
if (!job) return res.status(404).json({ message: "Job bulunamadı" });
await jobService.persistMetadata(job);
jobService.scheduleJob(job);
return res.json(job);
} catch (err) {

View File

@@ -1,6 +1,6 @@
import { Router, Request } from "express";
import crypto from "crypto";
import { deploymentService } from "../services/deploymentService.js";
import { deploymentService, normalizeCommitMessage } from "../services/deploymentService.js";
const router = Router();
@@ -18,6 +18,12 @@ function verifySignature(rawBody: Buffer, secret: string, signature: string) {
return crypto.timingSafeEqual(Buffer.from(cleaned), Buffer.from(expected));
}
/**
 * Strips an optional "refs/heads/" prefix from a git ref, returning the bare
 * branch name. Empty/undefined input yields "".
 */
function normalizeBranch(value: string | undefined) {
  const trimmed = value?.trim() ?? "";
  if (trimmed === "") return "";
  const prefix = "refs/heads/";
  if (trimmed.startsWith(prefix)) {
    return trimmed.slice(prefix.length);
  }
  return trimmed;
}
router.post("/api/deployments/webhook/:token", async (req, res) => {
const { token } = req.params;
const settings = await deploymentService.ensureSettings();
@@ -46,14 +52,16 @@ router.post("/api/deployments/webhook/:token", async (req, res) => {
const payload = req.body as { ref?: string; head_commit?: { message?: string }; commits?: Array<{ message?: string }> };
const ref = payload?.ref || "";
const branch = ref.startsWith("refs/heads/") ? ref.replace("refs/heads/", "") : ref;
const commitMessage =
const branch = normalizeBranch(ref);
const commitMessageRaw =
payload?.head_commit?.message || payload?.commits?.[payload.commits.length - 1]?.message;
const commitMessage = normalizeCommitMessage(commitMessageRaw);
const project = await deploymentService.findByWebhookToken(token);
if (!project) return res.status(404).json({ message: "Deployment bulunamadi" });
if (branch && branch !== project.branch) {
const projectBranch = normalizeBranch(project.branch);
if (projectBranch && projectBranch !== "*" && branch && branch !== projectBranch) {
return res.json({ ignored: true });
}

View File

@@ -10,12 +10,191 @@ import {
ComposeFile,
DeploymentEnv
} from "../models/deploymentProject.js";
import { DeploymentRun } from "../models/deploymentRun.js";
import { DeploymentRun, DeploymentRunDocument } from "../models/deploymentRun.js";
import { Settings } from "../models/settings.js";
const composeFileCandidates: ComposeFile[] = ["docker-compose.yml", "docker-compose.dev.yml"];
const deploymentsRoot = config.deploymentsRoot;
const metadataFileName = ".wisecolt-ci.json";
const settingsFileName = ".wisecolt-ci-settings.json";
const runsDirName = ".wisecolt-ci-runs";
/**
 * Reduces a (possibly multi-line) commit message to its trimmed first line.
 * Returns undefined for missing/empty input or a blank first line.
 */
export function normalizeCommitMessage(message?: string) {
  if (!message) return undefined;
  const [head] = message.split(/\r?\n/);
  const trimmed = head?.trim();
  return trimmed ? trimmed : undefined;
}
type DeploymentMetadata = {
name: string;
repoUrl: string;
branch: string;
composeFile: ComposeFile;
webhookToken: string;
env: DeploymentEnv;
port?: number;
envContent?: string;
envExampleName?: string;
};
type SettingsMetadata = {
webhookToken: string;
webhookSecret: string;
};
type StoredRun = {
status: "running" | "success" | "failed";
message?: string;
logs: string[];
startedAt: string;
finishedAt?: string;
durationMs?: number;
createdAt: string;
updatedAt: string;
};
/**
 * Loads and validates deployment metadata (.wisecolt-ci.json) from a repo
 * directory. Returns null when the file is missing, unreadable, malformed,
 * or lacks the required repoUrl/composeFile fields.
 */
async function readMetadata(repoDir: string): Promise<DeploymentMetadata | null> {
  const filePath = path.join(repoDir, metadataFileName);
  try {
    // Read directly and let the catch handle ENOENT: avoids the
    // existsSync/readFile check-then-use race and an extra stat call.
    const raw = await fs.promises.readFile(filePath, "utf8");
    const parsed = JSON.parse(raw) as DeploymentMetadata;
    if (!parsed?.repoUrl || !parsed?.composeFile) return null;
    return parsed;
  } catch {
    return null;
  }
}
/** Serializes deployment metadata as pretty-printed JSON into .wisecolt-ci.json under repoDir. */
async function writeMetadata(repoDir: string, data: DeploymentMetadata) {
  const target = path.join(repoDir, metadataFileName);
  const body = JSON.stringify(data, null, 2);
  await fs.promises.writeFile(target, body, "utf8");
}
/** Returns the per-repo directory that stores persisted run JSON files. */
function getRunsDir(baseDir: string) {
  return path.join(baseDir, runsDirName);
}
/** Converts a DeploymentRun document into its JSON-file representation (dates as ISO strings). */
function serializeRun(run: DeploymentRunDocument) {
  const toIso = (value: Date | number | string) => new Date(value).toISOString();
  return {
    status: run.status,
    message: run.message,
    logs: run.logs ?? [],
    startedAt: toIso(run.startedAt),
    finishedAt: run.finishedAt ? toIso(run.finishedAt) : undefined,
    durationMs: run.durationMs,
    createdAt: toIso(run.createdAt),
    updatedAt: toIso(run.updatedAt)
  } satisfies StoredRun;
}
async function writeRunFile(repoDir: string, run: DeploymentRunDocument) {
const dir = getRunsDir(repoDir);
await fs.promises.mkdir(dir, { recursive: true });
const data = serializeRun(run);
const name = `${new Date(data.startedAt).getTime()}-${run._id.toString()}.json`;
const filePath = path.join(dir, name);
await fs.promises.writeFile(filePath, JSON.stringify(data, null, 2), "utf8");
}
/**
 * Reads all persisted run JSON files for a repo, skipping files that are
 * unreadable, malformed, or missing the required startedAt/status fields.
 * Returns [] when the runs directory does not exist.
 */
async function readStoredRuns(repoDir: string): Promise<StoredRun[]> {
  const dir = getRunsDir(repoDir);
  let entries: string[];
  try {
    // readdir directly instead of existsSync + readdir: avoids the
    // check-then-use race and an extra stat.
    entries = await fs.promises.readdir(dir);
  } catch {
    return [];
  }
  const items: StoredRun[] = [];
  for (const entry of entries) {
    if (!entry.endsWith(".json")) continue;
    try {
      const raw = await fs.promises.readFile(path.join(dir, entry), "utf8");
      const parsed = JSON.parse(raw) as StoredRun;
      if (!parsed?.startedAt || !parsed?.status) continue;
      items.push(parsed);
    } catch {
      // ignore invalid file
    }
  }
  return items;
}
/**
 * Loads persisted webhook settings from the deployments root.
 * Returns null when the file is missing, unreadable, malformed, or
 * incomplete (both webhookToken and webhookSecret are required).
 */
async function readSettingsFile(): Promise<SettingsMetadata | null> {
  const filePath = path.join(deploymentsRoot, settingsFileName);
  try {
    // Direct read (ENOENT handled by catch) avoids the existsSync race.
    const raw = await fs.promises.readFile(filePath, "utf8");
    const parsed = JSON.parse(raw) as SettingsMetadata;
    if (!parsed?.webhookToken || !parsed?.webhookSecret) return null;
    return parsed;
  } catch {
    return null;
  }
}
/** Creates the deployments root (if needed) and writes webhook settings as pretty-printed JSON. */
async function writeSettingsFile(data: SettingsMetadata) {
  await fs.promises.mkdir(deploymentsRoot, { recursive: true });
  const target = path.join(deploymentsRoot, settingsFileName);
  await fs.promises.writeFile(target, JSON.stringify(data, null, 2), "utf8");
}
/**
 * Picks the compose file present in repoDir, preferring docker-compose.yml
 * over the dev variant. Returns null when neither exists.
 */
function inferComposeFile(repoDir: string): ComposeFile | null {
  for (const candidate of ["docker-compose.yml", "docker-compose.dev.yml"] as const) {
    if (fs.existsSync(path.join(repoDir, candidate))) {
      return candidate;
    }
  }
  return null;
}
/**
 * Extracts the "origin" remote URL from a checkout's .git/config.
 * Returns null when the file is missing/unreadable or origin has no url key.
 *
 * Fixes vs. previous version: any section header now ends the origin scope
 * (before, a url key inside e.g. [branch "x"] after origin was wrongly
 * matched), the key match is exact ("url" only, not any key starting with
 * "url"), and the existsSync/readFile race is gone.
 */
async function inferRepoUrlFromGit(repoDir: string): Promise<string | null> {
  const gitConfig = path.join(repoDir, ".git", "config");
  let content: string;
  try {
    content = await fs.promises.readFile(gitConfig, "utf8");
  } catch {
    return null;
  }
  let inOrigin = false;
  for (const line of content.split(/\r?\n/)) {
    const trimmed = line.trim();
    // A new section header starts; track whether we are inside [remote "origin"].
    if (trimmed.startsWith("[")) {
      inOrigin = trimmed === '[remote "origin"]';
      continue;
    }
    if (!inOrigin) continue;
    const match = /^url\s*=\s*(.*)$/.exec(trimmed);
    if (match) {
      const value = match[1].trim();
      return value || null;
    }
  }
  return null;
}
/**
 * Reads the checked-out branch from .git/HEAD ("ref: refs/heads/<branch>").
 * Returns null for a detached HEAD (bare SHA), a non-branch ref, or a
 * missing/unreadable file. Reads directly instead of existsSync+readFile
 * to avoid the check-then-use race.
 */
async function inferBranchFromGit(repoDir: string): Promise<string | null> {
  const headPath = path.join(repoDir, ".git", "HEAD");
  try {
    const head = (await fs.promises.readFile(headPath, "utf8")).trim();
    if (!head.startsWith("ref:")) return null; // detached HEAD stores a bare SHA
    const ref = head.slice("ref:".length).trim();
    const prefix = "refs/heads/";
    return ref.startsWith(prefix) ? ref.slice(prefix.length) : null;
  } catch {
    return null;
  }
}
/**
 * Derives a project name from the repo URL's last path segment (minus a
 * trailing ".git"), falling back to the directory's base name.
 */
function inferName(repoUrl: string, rootPath: string) {
  const segments = repoUrl.replace(/\/+$/, "").split("/");
  const tail = segments[segments.length - 1] ?? "";
  const name = tail.replace(/\.git$/i, "");
  return name !== "" ? name : path.basename(rootPath);
}
function slugify(value: string) {
return value
@@ -120,11 +299,37 @@ async function ensureRepo(project: DeploymentProjectDocument, onData: (line: str
if (!exists) {
const entries = await fs.promises.readdir(repoDir);
if (entries.length > 0) {
const allowed = new Set<string>([metadataFileName, ".env", ".env.local", runsDirName]);
const blocking = entries.filter((name) => !allowed.has(name));
if (blocking.length > 0) {
throw new Error("Repo klasoru git olmayan dosyalar iceriyor");
}
let envBackup: string | null = null;
const envPath = path.join(repoDir, ".env");
if (fs.existsSync(envPath)) {
envBackup = await fs.promises.readFile(envPath, "utf8");
}
await Promise.all(
entries
.filter((name) => allowed.has(name))
.map((name) => fs.promises.rm(path.join(repoDir, name), { force: true }))
);
onData(`Repo klonlanıyor: ${project.repoUrl}`);
await runCommand(`git clone --branch ${project.branch} ${project.repoUrl} .`, repoDir, onData);
if (envBackup) {
await fs.promises.writeFile(envPath, envBackup, "utf8");
}
await writeMetadata(repoDir, {
name: project.name,
repoUrl: project.repoUrl,
branch: project.branch,
composeFile: project.composeFile,
webhookToken: project.webhookToken,
env: project.env,
port: project.port,
envContent: project.envContent,
envExampleName: project.envExampleName
});
} else {
onData("Repo güncelleniyor (git fetch/pull)...");
await runCommand(`git fetch origin ${project.branch}`, repoDir, onData);
@@ -247,10 +452,24 @@ class DeploymentService {
async ensureSettings() {
const existing = await Settings.findOne();
if (existing) return existing;
const fileSettings = await readSettingsFile();
if (fileSettings) {
const createdFromFile = await Settings.create({
webhookToken: fileSettings.webhookToken,
webhookSecret: fileSettings.webhookSecret
});
return createdFromFile;
}
const created = await Settings.create({
webhookToken: generateApiToken(),
webhookSecret: generateSecret()
});
await writeSettingsFile({
webhookToken: created.webhookToken,
webhookSecret: created.webhookSecret
});
return created;
}
@@ -258,6 +477,10 @@ class DeploymentService {
const settings = await this.ensureSettings();
settings.webhookToken = generateApiToken();
await settings.save();
await writeSettingsFile({
webhookToken: settings.webhookToken,
webhookSecret: settings.webhookSecret
});
return settings;
}
@@ -265,6 +488,10 @@ class DeploymentService {
const settings = await this.ensureSettings();
settings.webhookSecret = generateSecret();
await settings.save();
await writeSettingsFile({
webhookToken: settings.webhookToken,
webhookSecret: settings.webhookSecret
});
return settings;
}
@@ -301,7 +528,7 @@ class DeploymentService {
}
const env = deriveEnv(input.composeFile);
return DeploymentProject.create({
const created = await DeploymentProject.create({
name: input.name,
rootPath,
repoUrl,
@@ -313,6 +540,18 @@ class DeploymentService {
envContent: input.envContent,
envExampleName: input.envExampleName
});
await writeMetadata(rootPath, {
name: created.name,
repoUrl: created.repoUrl,
branch: created.branch,
composeFile: created.composeFile,
webhookToken: created.webhookToken,
env: created.env,
port: created.port,
envContent: created.envContent,
envExampleName: created.envExampleName
});
return created;
}
async updateProject(
@@ -355,6 +594,19 @@ class DeploymentService {
},
{ new: true, runValidators: true }
);
if (updated) {
await writeMetadata(updated.rootPath, {
name: updated.name,
repoUrl: updated.repoUrl,
branch: updated.branch,
composeFile: updated.composeFile,
webhookToken: updated.webhookToken,
env: updated.env,
port: updated.port,
envContent: updated.envContent,
envExampleName: updated.envExampleName
});
}
return updated;
}
@@ -370,6 +622,7 @@ class DeploymentService {
return;
}
const normalizedMessage = normalizeCommitMessage(options?.message);
const startedAt = Date.now();
const runLogs: string[] = [];
const pushLog = (line: string) => {
@@ -381,17 +634,18 @@ class DeploymentService {
project: projectId,
status: "running",
startedAt: new Date(),
message: options?.message
message: normalizedMessage ?? options?.message
});
this.emitRun(projectId, runDoc);
await writeRunFile(project.rootPath, runDoc);
await DeploymentProject.findByIdAndUpdate(projectId, {
lastStatus: "running",
lastMessage: options?.message || "Deploy başlıyor..."
lastMessage: normalizedMessage ?? options?.message ?? "Deploy başlıyor..."
});
await this.emitStatus(projectId, {
lastStatus: "running",
lastMessage: options?.message || "Deploy başlıyor..."
lastMessage: normalizedMessage ?? options?.message ?? "Deploy başlıyor..."
} as DeploymentProjectDocument);
try {
@@ -406,22 +660,23 @@ class DeploymentService {
await DeploymentProject.findByIdAndUpdate(projectId, {
lastStatus: "success",
lastDeployAt: new Date(),
lastMessage: options?.message || "Başarılı"
lastMessage: normalizedMessage ?? options?.message ?? "Başarılı"
});
await this.emitStatus(projectId, {
lastStatus: "success",
lastDeployAt: new Date(),
lastMessage: options?.message || "Başarılı"
lastMessage: normalizedMessage ?? options?.message ?? "Başarılı"
} as DeploymentProjectDocument);
await DeploymentRun.findByIdAndUpdate(runDoc._id, {
status: "success",
finishedAt: new Date(),
durationMs: duration,
logs: runLogs,
message: options?.message
message: normalizedMessage ?? options?.message
});
const updatedRun = await DeploymentRun.findById(runDoc._id);
if (updatedRun) this.emitRun(projectId, updatedRun);
if (updatedRun) await writeRunFile(project.rootPath, updatedRun);
pushLog("Deploy tamamlandı: Başarılı");
} catch (err) {
const duration = Date.now() - startedAt;
@@ -440,10 +695,11 @@ class DeploymentService {
finishedAt: new Date(),
durationMs: duration,
logs: runLogs,
message: options?.message
message: normalizedMessage ?? options?.message
});
const updatedRun = await DeploymentRun.findById(runDoc._id);
if (updatedRun) this.emitRun(projectId, updatedRun);
if (updatedRun) await writeRunFile(project.rootPath, updatedRun);
pushLog(`Hata: ${(err as Error).message}`);
} finally {
this.running.delete(projectId);
@@ -465,6 +721,131 @@ class DeploymentService {
async findByWebhookToken(token: string) {
return DeploymentProject.findOne({ webhookToken: token });
}
/**
 * One-time cleanup: collapses multi-line lastMessage / run message values in
 * the database down to their first line, matching normalizeCommitMessage's
 * output format for records written before normalization existed.
 * Uses document.save() per record so Mongoose validation/middleware still run.
 */
async normalizeExistingCommitMessages() {
// Only fetch documents whose message actually contains a line break.
const projects = await DeploymentProject.find({
lastMessage: { $regex: /[\r\n]/ }
});
for (const project of projects) {
const normalized = normalizeCommitMessage(project.lastMessage);
// Skip when normalization yields nothing or the value is already clean.
if (normalized && normalized !== project.lastMessage) {
project.lastMessage = normalized;
await project.save();
}
}
// Same pass over historical run records.
const runs = await DeploymentRun.find({
message: { $regex: /[\r\n]/ }
});
for (const run of runs) {
const normalized = normalizeCommitMessage(run.message);
if (normalized && normalized !== run.message) {
run.message = normalized;
await run.save();
}
}
}
/**
 * Rebuilds DeploymentProject records (and their run history) from directories
 * found under the known deployment roots. Intended for recovery after the
 * database is emptied: only directories with readable metadata or an
 * inferable git origin are imported; everything else is skipped silently.
 */
async bootstrapFromFilesystem() {
// Several roots are probed because cwd differs between dev and the deployed
// layout; the absolute path is a last-resort production default.
const candidateRoots = [
deploymentsRoot,
path.resolve(process.cwd(), "deployments"),
path.resolve(process.cwd(), "..", "deployments"),
path.resolve(process.cwd(), "..", "..", "deployments"),
"/root/Wisecolt-CI/deployments"
];
// Deduplicate (candidates may resolve to the same dir) and keep only roots that exist.
const roots = Array.from(
new Set(candidateRoots.filter((root) => root && fs.existsSync(root)))
);
for (const root of roots) {
const entries = await fs.promises.readdir(root, { withFileTypes: true });
const dirs = entries.filter((entry) => entry.isDirectory());
for (const entry of dirs) {
const rootPath = path.join(root, entry.name);
// Skip directories already tracked, either by path or by repo URL.
const existing = await DeploymentProject.findOne({ rootPath });
if (existing) continue;
// Prefer explicit metadata; fall back to reading .git/config.
const metadata = await readMetadata(rootPath);
const repoUrlRaw = metadata?.repoUrl || (await inferRepoUrlFromGit(rootPath));
if (!repoUrlRaw) continue;
const repoUrl = normalizeRepoUrl(repoUrlRaw);
const repoExisting = await DeploymentProject.findOne({ repoUrl });
if (repoExisting) continue;
const composeFile = metadata?.composeFile || inferComposeFile(rootPath);
if (!composeFile) continue;
const branch = metadata?.branch || (await inferBranchFromGit(rootPath)) || "main";
const name = metadata?.name || inferName(repoUrl, rootPath);
// Webhook tokens must be unique across projects; regenerate until no collision.
let webhookToken = metadata?.webhookToken || generateWebhookToken();
while (await DeploymentProject.findOne({ webhookToken })) {
webhookToken = generateWebhookToken();
}
// Recover env contents from the checked-out .env when metadata lacks it.
let envContent = metadata?.envContent;
const envPath = path.join(rootPath, ".env");
if (!envContent && fs.existsSync(envPath)) {
envContent = await fs.promises.readFile(envPath, "utf8");
}
const envExampleName = metadata?.envExampleName;
const env = deriveEnv(composeFile);
const created = await DeploymentProject.create({
name,
rootPath,
repoUrl,
branch,
composeFile,
webhookToken,
env,
port: metadata?.port,
envContent,
envExampleName
});
// Write metadata back so the directory is self-describing next time
// (covers the inferred-from-git case and any regenerated token).
await writeMetadata(rootPath, {
name: created.name,
repoUrl: created.repoUrl,
branch: created.branch,
composeFile: created.composeFile,
webhookToken: created.webhookToken,
env: created.env,
port: created.port,
envContent: created.envContent,
envExampleName: created.envExampleName
});
// Restore run history from the on-disk run files.
const storedRuns = await readStoredRuns(rootPath);
if (storedRuns.length > 0) {
// Sort newest-first so storedRuns[0] is the latest run.
storedRuns.sort(
(a, b) => new Date(b.startedAt).getTime() - new Date(a.startedAt).getTime()
);
await DeploymentRun.insertMany(
storedRuns.map((run) => ({
project: created._id,
status: run.status,
message: run.message,
logs: run.logs || [],
startedAt: new Date(run.startedAt),
finishedAt: run.finishedAt ? new Date(run.finishedAt) : undefined,
durationMs: run.durationMs,
createdAt: new Date(run.createdAt),
updatedAt: new Date(run.updatedAt)
}))
);
// Mirror the latest run onto the project's summary fields.
const latest = storedRuns[0];
await DeploymentProject.findByIdAndUpdate(created._id, {
lastStatus: latest.status,
lastDeployAt: new Date(latest.finishedAt || latest.startedAt),
lastMessage: latest.message
});
}
}
}
}
}
export const deploymentService = new DeploymentService();

View File

@@ -3,9 +3,11 @@ import path from "path";
import { spawn } from "child_process";
import { Server } from "socket.io";
import { Job, JobDocument, TimeUnit } from "../models/job.js";
import { JobRun } from "../models/jobRun.js";
import { JobRun, JobRunDocument } from "../models/jobRun.js";
const repoBaseDir = path.join(process.cwd(), "test-runs");
const jobMetadataFileName = ".wisecolt-ci-job.json";
const jobRunsDirName = ".wisecolt-ci-job-runs";
function unitToMs(unit: TimeUnit) {
if (unit === "dakika") return 60_000;
@@ -17,6 +19,91 @@ function ensureDir(dir: string) {
return fs.promises.mkdir(dir, { recursive: true });
}
type JobMetadata = {
name: string;
repoUrl: string;
testCommand: string;
checkValue: number;
checkUnit: TimeUnit;
};
type StoredJobRun = {
status: "running" | "success" | "failed";
logs: string[];
startedAt: string;
finishedAt?: string;
durationMs?: number;
createdAt: string;
updatedAt: string;
};
/** Per-job working directory under the test-runs root, keyed by job id. */
function getJobDir(id: string) {
  return path.join(repoBaseDir, id);
}
/** Directory inside a job's working dir that stores persisted run JSON files. */
function getJobRunsDir(baseDir: string) {
  return path.join(baseDir, jobRunsDirName);
}
/**
 * Loads and validates job metadata (.wisecolt-ci-job.json) from a job
 * directory. Returns null when the file is missing, unreadable, malformed,
 * or lacks the required repoUrl/testCommand fields.
 */
async function readJobMetadata(jobDir: string): Promise<JobMetadata | null> {
  const filePath = path.join(jobDir, jobMetadataFileName);
  try {
    // Direct read (ENOENT handled by catch) avoids the existsSync/readFile race.
    const raw = await fs.promises.readFile(filePath, "utf8");
    const parsed = JSON.parse(raw) as JobMetadata;
    if (!parsed?.repoUrl || !parsed?.testCommand) return null;
    return parsed;
  } catch {
    return null;
  }
}
/** Ensures the job directory exists, then writes job metadata as pretty-printed JSON. */
async function writeJobMetadata(jobDir: string, data: JobMetadata) {
  await ensureDir(jobDir);
  const target = path.join(jobDir, jobMetadataFileName);
  const body = JSON.stringify(data, null, 2);
  await fs.promises.writeFile(target, body, "utf8");
}
/** Converts a JobRun document into its on-disk JSON representation (dates as ISO strings). */
function serializeJobRun(run: JobRunDocument) {
  const toIso = (value: Date | number | string) => new Date(value).toISOString();
  return {
    status: run.status,
    logs: run.logs ?? [],
    startedAt: toIso(run.startedAt),
    finishedAt: run.finishedAt ? toIso(run.finishedAt) : undefined,
    durationMs: run.durationMs,
    createdAt: toIso(run.createdAt),
    updatedAt: toIso(run.updatedAt)
  } satisfies StoredJobRun;
}
/** Persists one job run as <startedAtMs>-<runId>.json inside the job's runs directory. */
async function writeJobRunFile(jobDir: string, run: JobRunDocument) {
  const runsDir = getJobRunsDir(jobDir);
  await ensureDir(runsDir);
  const stored = serializeJobRun(run);
  const fileName = `${new Date(stored.startedAt).getTime()}-${run._id.toString()}.json`;
  await fs.promises.writeFile(path.join(runsDir, fileName), JSON.stringify(stored, null, 2), "utf8");
}
/**
 * Reads all persisted job-run JSON files, skipping files that are
 * unreadable, malformed, or missing the required startedAt/status fields.
 * Returns [] when the runs directory does not exist.
 */
async function readStoredJobRuns(jobDir: string): Promise<StoredJobRun[]> {
  const dir = getJobRunsDir(jobDir);
  let entries: string[];
  try {
    // readdir directly instead of existsSync + readdir: avoids the
    // check-then-use race and an extra stat.
    entries = await fs.promises.readdir(dir);
  } catch {
    return [];
  }
  const items: StoredJobRun[] = [];
  for (const entry of entries) {
    if (!entry.endsWith(".json")) continue;
    try {
      const raw = await fs.promises.readFile(path.join(dir, entry), "utf8");
      const parsed = JSON.parse(raw) as StoredJobRun;
      if (!parsed?.startedAt || !parsed?.status) continue;
      items.push(parsed);
    } catch {
      // ignore invalid file
    }
  }
  return items;
}
function cleanOutput(input: string) {
// ANSI escape sequences temizleme
return input.replace(
@@ -85,8 +172,42 @@ async function cloneOrPull(job: JobDocument, onData: (chunk: string) => void) {
const exists = fs.existsSync(gitDir);
if (!exists) {
const entries = await fs.promises.readdir(repoDir);
const allowed = new Set<string>([jobMetadataFileName, jobRunsDirName]);
const blocking = entries.filter((name) => !allowed.has(name));
if (blocking.length > 0) {
throw new Error("Repo klasoru git olmayan dosyalar iceriyor");
}
let metadataBackup: string | null = null;
const metadataPath = path.join(repoDir, jobMetadataFileName);
if (fs.existsSync(metadataPath)) {
metadataBackup = await fs.promises.readFile(metadataPath, "utf8");
}
let runsBackupPath: string | null = null;
const runsDir = path.join(repoDir, jobRunsDirName);
if (fs.existsSync(runsDir)) {
const tmpBase = await fs.promises.mkdtemp(path.join(repoBaseDir, ".tmp-"));
runsBackupPath = path.join(tmpBase, jobRunsDirName);
await fs.promises.rename(runsDir, runsBackupPath);
}
await Promise.all(
entries
.filter((name) => allowed.has(name))
.map((name) => fs.promises.rm(path.join(repoDir, name), { recursive: true, force: true }))
);
onData(`Repo klonlanıyor: ${job.repoUrl}`);
await runCommand(`git clone ${job.repoUrl} ${repoDir}`, process.cwd(), onData);
if (metadataBackup) {
await fs.promises.writeFile(metadataPath, metadataBackup, "utf8");
}
if (runsBackupPath) {
await fs.promises.rename(runsBackupPath, runsDir);
}
} else {
onData("Repo güncelleniyor (git pull)...");
await runCommand("git pull", repoDir, onData);
@@ -156,6 +277,7 @@ class JobService {
status: "running",
startedAt: new Date()
});
await writeJobRunFile(getJobDir(jobId), runDoc);
await Job.findByIdAndUpdate(jobId, { status: "running", lastMessage: "Çalıştırılıyor..." });
await this.emitStatus(jobId, { status: "running", lastMessage: "Çalıştırılıyor..." } as JobDocument);
@@ -179,6 +301,8 @@ class JobService {
durationMs: duration,
logs: runLogs
});
const updatedRun = await JobRun.findById(runDoc._id);
if (updatedRun) await writeJobRunFile(getJobDir(jobId), updatedRun);
await this.emitStatus(jobId, {
status: "success",
lastRunAt: new Date(),
@@ -199,6 +323,8 @@ class JobService {
durationMs: duration,
logs: runLogs
});
const updatedRun = await JobRun.findById(runDoc._id);
if (updatedRun) await writeJobRunFile(getJobDir(jobId), updatedRun);
pushLog(`Hata: ${(err as Error).message}`);
await this.emitStatus(jobId, {
status: "failed",
@@ -231,6 +357,78 @@ class JobService {
const jobs = await Job.find();
jobs.forEach((job) => this.scheduleJob(job));
}
/**
 * Writes the job's schedulable fields to its on-disk metadata file so the
 * job can be reconstructed if the database is wiped.
 */
async persistMetadata(job: JobDocument) {
const { name, repoUrl, testCommand, checkValue, checkUnit } = job;
await writeJobMetadata(getJobDir(job._id.toString()), {
name,
repoUrl,
testCommand,
checkValue,
checkUnit
});
}
/**
 * Recreates Job records (and their run history) from job directories found
 * under the known test-run roots. Only directories carrying a readable
 * .wisecolt-ci-job.json are imported; jobs whose repoUrl already exists in
 * the database are skipped.
 */
async bootstrapFromFilesystem() {
// Multiple roots are probed because cwd differs between dev and the
// deployed layout; the absolute path is a production fallback.
const candidateRoots = [
repoBaseDir,
path.resolve(process.cwd(), "test-runs"),
path.resolve(process.cwd(), "..", "test-runs"),
path.resolve(process.cwd(), "..", "..", "test-runs"),
"/root/Wisecolt-CI/test-runs"
];
// Deduplicate and keep only roots that actually exist on disk.
const roots = Array.from(
new Set(candidateRoots.filter((root) => root && fs.existsSync(root)))
);
for (const root of roots) {
const entries = await fs.promises.readdir(root, { withFileTypes: true });
const dirs = entries.filter((entry) => entry.isDirectory());
for (const entry of dirs) {
const jobDir = path.join(root, entry.name);
const metadata = await readJobMetadata(jobDir);
if (!metadata) continue;
// repoUrl is the de-dup key between filesystem and database.
const existing = await Job.findOne({ repoUrl: metadata.repoUrl });
if (existing) continue;
const created = await Job.create({
name: metadata.name,
repoUrl: metadata.repoUrl,
testCommand: metadata.testCommand,
checkValue: metadata.checkValue,
checkUnit: metadata.checkUnit
});
// Re-persist under the recreated job's id-named directory.
// NOTE(review): the old directory (named after the previous id) is left
// in place with its metadata — confirm that is intended.
await this.persistMetadata(created);
// Restore run history from the on-disk run files.
const storedRuns = await readStoredJobRuns(jobDir);
if (storedRuns.length > 0) {
// Sort newest-first so storedRuns[0] is the latest run.
storedRuns.sort(
(a, b) => new Date(b.startedAt).getTime() - new Date(a.startedAt).getTime()
);
await JobRun.insertMany(
storedRuns.map((run) => ({
job: created._id,
status: run.status,
logs: run.logs || [],
startedAt: new Date(run.startedAt),
finishedAt: run.finishedAt ? new Date(run.finishedAt) : undefined,
durationMs: run.durationMs,
createdAt: new Date(run.createdAt),
updatedAt: new Date(run.updatedAt)
}))
);
const latest = storedRuns[0];
// A run persisted while still "running" is demoted to "idle" after restart.
await Job.findByIdAndUpdate(created._id, {
status: latest.status === "running" ? "idle" : latest.status,
lastRunAt: new Date(latest.finishedAt || latest.startedAt),
lastDurationMs: latest.durationMs,
lastMessage: latest.status === "success" ? "Başarılı" : "Hata"
});
}
}
}
}
}
export const jobService = new JobService();