feat: backend orkestrasyonunu ve arac entegrasyonlarini genislet

This commit is contained in:
2026-03-22 04:45:43 +03:00
parent d07bc365f5
commit 5f4c19a18d
25 changed files with 3750 additions and 82 deletions

View File

@@ -0,0 +1,218 @@
from __future__ import annotations
import json
from datetime import datetime
from pathlib import Path
import httpx
from sqlalchemy import select
from sqlalchemy.orm import Session
from app.config import get_settings
from app.db import AuditLogORM, SecondBrainCaptureORM, SecondBrainNoteORM, SecretORM, SettingORM
class SecondBrainService:
    """Capture Telegram notes into a local "second brain" and sync them upstream.

    Persists per-user capture state and notes through the injected SQLAlchemy
    session, renders all notes into a single markdown document, and pushes
    that document to an AnythingLLM workspace over HTTP.
    """

    # Name of the rendered markdown document; also matched against remote
    # document filenames/titles when cleaning up previous uploads.
    FILENAME = "second_brain.md"

    def __init__(self, session: Session) -> None:
        # Session is owned by the caller; this service only flushes, never commits.
        self.session = session
def start_capture(self, telegram_user_id: int) -> str:
record = self.session.get(SecondBrainCaptureORM, telegram_user_id)
if record is None:
record = SecondBrainCaptureORM(
telegram_user_id=telegram_user_id,
created_at=datetime.utcnow(),
updated_at=datetime.utcnow(),
)
self.session.add(record)
else:
record.updated_at = datetime.utcnow()
self.session.add(
AuditLogORM(category="second_brain", message=f"second_brain:capture-start:{telegram_user_id}")
)
self.session.flush()
return "Second brain notunu gonder. Iptal etmek istersen /iptal yazabilirsin."
def is_capture_active(self, telegram_user_id: int) -> bool:
return self.session.get(SecondBrainCaptureORM, telegram_user_id) is not None
def cancel_capture(self, telegram_user_id: int) -> str:
record = self.session.get(SecondBrainCaptureORM, telegram_user_id)
if record is not None:
self.session.delete(record)
self.session.add(
AuditLogORM(category="second_brain", message=f"second_brain:capture-cancel:{telegram_user_id}")
)
self.session.flush()
return "Second brain not ekleme akisini durdurdum."
    async def save_note_and_sync(self, telegram_user_id: int, text: str, workspace_root: Path) -> str:
        """Persist a note, regenerate the markdown file, and sync it to AnythingLLM.

        Returns a Turkish status message for the Telegram user. The note is
        saved locally even when the upstream sync fails; the failure is then
        reported both in the returned message and in the audit log.
        """
        content = text.strip()
        if not content:
            # Reject whitespace-only submissions without touching the DB.
            return "Bos bir not kaydedemem. Lutfen not metnini gonder."
        # Saving a note always ends the user's active capture session, if any.
        capture = self.session.get(SecondBrainCaptureORM, telegram_user_id)
        if capture is not None:
            self.session.delete(capture)
        note = SecondBrainNoteORM(
            telegram_user_id=telegram_user_id,
            content=content,
            source="telegram",
            created_at=datetime.utcnow(),
            updated_at=datetime.utcnow(),
        )
        self.session.add(note)
        # Flush so the database assigns note.id before it is logged below.
        self.session.flush()
        markdown_path = self._write_markdown(workspace_root)
        sync_result = await self._sync_markdown(markdown_path)
        self.session.add(
            AuditLogORM(
                category="second_brain",
                message=f"second_brain:note-saved:{telegram_user_id}:{note.id}",
            )
        )
        # Record the full sync outcome (ok or error payload) for diagnostics.
        self.session.add(
            AuditLogORM(
                category="second_brain",
                message=f"second_brain:sync:{json.dumps(sync_result, ensure_ascii=False)}",
            )
        )
        self.session.flush()
        if sync_result["status"] != "ok":
            # The note is already stored; only the upstream sync failed.
            message = str(sync_result.get("message", "Second brain sync failed."))
            return f"Notu kaydettim ama AnythingLLM senkronu basarisiz oldu: {message}"
        return "Notunu kaydettim ve ikinci beynine senkronladim."
def _write_markdown(self, workspace_root: Path) -> Path:
notes = list(
self.session.scalars(
select(SecondBrainNoteORM).order_by(SecondBrainNoteORM.created_at.asc(), SecondBrainNoteORM.id.asc())
)
)
lines = [
"# Second Brain",
"",
"WiseClaw tarafindan Telegram notlarindan uretilen senkron belge.",
"",
]
for note in notes:
timestamp = note.created_at.strftime("%Y-%m-%d %H:%M:%S")
lines.extend(
[
f"## Note {note.id} - {timestamp}",
f"- Source: {note.source}",
f"- Telegram User: {note.telegram_user_id}",
"",
note.content,
"",
]
)
markdown_path = workspace_root / "backend" / self.FILENAME
markdown_path.write_text("\n".join(lines).strip() + "\n", encoding="utf-8")
return markdown_path
    async def _sync_markdown(self, markdown_path: Path) -> dict[str, object]:
        """Upload *markdown_path* to the configured AnythingLLM workspace.

        Runtime settings rows override the static config values; the API key
        comes from the secrets table with a config fallback. Any previously
        synced copies of the document are un-embedded before the fresh upload
        is attached, so the workspace holds at most one copy.

        Returns ``{"status": "ok", "location": ..., "deleted": [...]}`` on
        success or ``{"status": "error", "message": ...}`` on failure; HTTP
        errors are captured rather than raised.
        """
        settings = get_settings()
        # DB-stored settings take precedence over environment/config defaults.
        runtime_settings = {
            item.key: item.value for item in self.session.scalars(select(SettingORM))
        }
        base_url = runtime_settings.get("anythingllm_base_url", settings.anythingllm_base_url).rstrip("/")
        workspace_slug = runtime_settings.get("anythingllm_workspace_slug", settings.anythingllm_workspace_slug).strip()
        secret = self.session.get(SecretORM, "anythingllm_api_key")
        api_key = secret.value if secret else settings.anythingllm_api_key
        if not base_url:
            return {"status": "error", "message": "AnythingLLM base URL is not configured."}
        if not workspace_slug:
            return {"status": "error", "message": "AnythingLLM workspace slug is not configured."}
        if not api_key:
            return {"status": "error", "message": "AnythingLLM API key is not configured."}
        headers = {"Authorization": f"Bearer {api_key}"}
        try:
            async with httpx.AsyncClient(timeout=30.0) as client:
                # Fetch the workspace to discover previously-synced copies.
                workspace_response = await client.get(
                    f"{base_url}/api/v1/workspace/{workspace_slug}",
                    headers=headers,
                )
                workspace_response.raise_for_status()
                workspace_payload = workspace_response.json()
                deletes = self._find_existing_second_brain_docs(workspace_payload)
                if deletes:
                    # Detach stale copies before uploading the replacement.
                    delete_response = await client.post(
                        f"{base_url}/api/v1/workspace/{workspace_slug}/update-embeddings",
                        headers={**headers, "Content-Type": "application/json"},
                        json={"deletes": deletes},
                    )
                    delete_response.raise_for_status()
                with markdown_path.open("rb") as file_handle:
                    upload_response = await client.post(
                        f"{base_url}/api/v1/document/upload",
                        headers=headers,
                        files={"file": (markdown_path.name, file_handle, "text/markdown")},
                    )
                upload_response.raise_for_status()
                upload_payload = upload_response.json()
                uploaded_location = self._extract_uploaded_location(upload_payload)
                if not uploaded_location:
                    return {"status": "error", "message": "AnythingLLM upload did not return a document location."}
                # Attach the fresh upload to the workspace embeddings.
                attach_response = await client.post(
                    f"{base_url}/api/v1/workspace/{workspace_slug}/update-embeddings",
                    headers={**headers, "Content-Type": "application/json"},
                    json={"adds": [uploaded_location]},
                )
                attach_response.raise_for_status()
        except httpx.HTTPError as exc:
            return {"status": "error", "message": str(exc)}
        return {"status": "ok", "location": uploaded_location, "deleted": deletes}
def _find_existing_second_brain_docs(self, workspace_payload: dict[str, object]) -> list[str]:
documents = []
workspace_items = workspace_payload.get("workspace", [])
if isinstance(workspace_items, list) and workspace_items:
first = workspace_items[0]
if isinstance(first, dict):
documents = first.get("documents", [])
if not isinstance(documents, list):
return []
paths: list[str] = []
for item in documents:
if not isinstance(item, dict):
continue
filename = str(item.get("filename", "")).strip()
docpath = str(item.get("docpath", "")).strip()
metadata_raw = item.get("metadata")
metadata_title = ""
if isinstance(metadata_raw, str):
try:
metadata = json.loads(metadata_raw)
if isinstance(metadata, dict):
metadata_title = str(metadata.get("title", "")).strip()
except json.JSONDecodeError:
metadata_title = ""
if (
filename.startswith(f"{Path(self.FILENAME).stem}.md-")
or filename.startswith(self.FILENAME)
or metadata_title == self.FILENAME
) and docpath:
paths.append(docpath)
return paths
def _extract_uploaded_location(self, payload: dict[str, object]) -> str:
documents = payload.get("documents", [])
if not isinstance(documents, list) or not documents:
return ""
first = documents[0]
if not isinstance(first, dict):
return ""
return str(first.get("location", "")).strip()