feat: backend orkestrasyonunu ve araç entegrasyonlarını genişlet
This commit is contained in:
164
backend/app/tools/second_brain.py
Normal file
164
backend/app/tools/second_brain.py
Normal file
@@ -0,0 +1,164 @@
|
||||
from typing import Any
|
||||
|
||||
import httpx
|
||||
|
||||
from app.tools.base import Tool
|
||||
|
||||
|
||||
class SecondBrainTool(Tool):
    """Retrieve context from a configured AnythingLLM workspace.

    Posts the user's question to the workspace chat endpoint and returns the
    text answer together with any cited sources. Because different AnythingLLM
    versions accept slightly different request bodies, ``run`` tries a list of
    payload shapes in order, falling back to the next one whenever the server
    rejects a shape with HTTP 400.
    """

    name = "second_brain"
    description = "Search and retrieve context from the configured AnythingLLM workspace."

    def __init__(self, base_url: str, workspace_slug: str, api_key: str) -> None:
        # Normalize upfront so the endpoint f-string below cannot produce
        # double slashes and headers never carry stray whitespace.
        self.base_url = base_url.rstrip("/")
        self.workspace_slug = workspace_slug.strip().strip("/")
        self.api_key = api_key.strip()

    def parameters_schema(self) -> dict[str, Any]:
        """Return the JSON schema describing the arguments accepted by ``run``."""
        return {
            "type": "object",
            "properties": {
                "query": {
                    "type": "string",
                    "description": "The user question to search in the second brain workspace.",
                },
                "mode": {
                    "type": "string",
                    "description": "Workspace chat mode. Prefer query for retrieval-focused lookups.",
                    "enum": ["query", "chat"],
                },
            },
            "required": ["query"],
            "additionalProperties": False,
        }

    async def run(self, payload: dict[str, Any]) -> dict[str, Any]:
        """Execute a workspace lookup.

        Args:
            payload: Tool arguments; ``query`` (required) and optional ``mode``
                (``"query"`` or ``"chat"``; anything else is coerced to ``"query"``).

        Returns:
            A dict with ``status`` ``"ok"`` (plus ``context``, ``sources``,
            ``raw``) on success, or ``status`` ``"error"`` with a ``message``
            on validation/configuration/HTTP failure. Never raises for
            expected HTTP errors.
        """
        query = str(payload.get("query", "")).strip()
        mode = str(payload.get("mode", "query") or "query").strip().lower()
        if mode not in {"query", "chat"}:
            mode = "query"

        # Fail fast on missing input or configuration before touching the network.
        if not query:
            return {"tool": self.name, "status": "error", "message": "Query is required."}
        if not self.base_url:
            return {"tool": self.name, "status": "error", "message": "AnythingLLM base URL is not configured."}
        if not self.workspace_slug:
            return {"tool": self.name, "status": "error", "message": "AnythingLLM workspace slug is not configured."}
        if not self.api_key:
            return {"tool": self.name, "status": "error", "message": "AnythingLLM API key is not configured."}

        endpoint = f"{self.base_url}/api/v1/workspace/{self.workspace_slug}/chat"
        instructed_query = self._build_query_prompt(query, mode)
        headers = {
            "Authorization": f"Bearer {self.api_key}",
            "Content-Type": "application/json",
        }
        # Richest payload first; progressively simpler shapes for older/other
        # AnythingLLM builds that reject unknown fields or the "query" mode.
        payload_candidates = [
            {
                "message": instructed_query,
                "mode": mode,
                "sessionId": None,
                "attachments": [],
            },
            {
                "message": instructed_query,
                "mode": "chat",
                "sessionId": None,
                "attachments": [],
            },
            {
                "message": instructed_query,
                "mode": "chat",
            },
        ]
        last_error = ""
        response = None
        try:
            async with httpx.AsyncClient(timeout=30.0) as client:
                for request_payload in payload_candidates:
                    response = await client.post(endpoint, headers=headers, json=request_payload)
                    if response.is_success:
                        break
                    last_error = self._format_error(response)
                    # HTTP 400 usually means this payload shape is unsupported
                    # by the server version — fall through to the next
                    # candidate. Any other failure status is not shape-related,
                    # so surface it immediately via raise_for_status().
                    if response.status_code != 400:
                        response.raise_for_status()
                else:
                    # for/else: every candidate was rejected with HTTP 400.
                    return {
                        "tool": self.name,
                        "status": "error",
                        "query": query,
                        "workspace_slug": self.workspace_slug,
                        "message": last_error or "AnythingLLM request failed.",
                    }
        except httpx.HTTPError as exc:
            return {
                "tool": self.name,
                "status": "error",
                "query": query,
                "workspace_slug": self.workspace_slug,
                "message": str(exc),
            }

        # A 2xx response is not guaranteed to carry a JSON body; treat a
        # non-JSON payload as an empty result rather than crashing.
        try:
            data = response.json() if response is not None else {}
        except ValueError:
            data = {}
        text_response = self._extract_text_response(data)
        sources = self._extract_sources(data)
        return {
            "tool": self.name,
            "status": "ok",
            "query": query,
            "mode": mode,
            "workspace_slug": self.workspace_slug,
            "context": text_response,
            "sources": sources,
            "raw": data,
        }

    def _build_query_prompt(self, query: str, mode: str) -> str:
        """Wrap *query* with strict answer-only instructions in query mode.

        In "chat" mode the raw query is forwarded untouched.
        """
        if mode == "query":
            return (
                "Only answer the exact question using the workspace context. "
                "Do not add commentary, headings, bullets, extra notes, names, or related reminders. "
                "If the answer contains a date and place, return only that information in one short sentence. "
                "Question: "
                f"{query}"
            )
        return query

    def _format_error(self, response: httpx.Response) -> str:
        """Return a human-readable error from *response*.

        Prefers an ``error``/``message`` string from a JSON body; otherwise
        falls back to the bare HTTP status code.
        """
        try:
            payload = response.json()
        except ValueError:
            return f"HTTP {response.status_code}"
        if isinstance(payload, dict):
            for key in ("error", "message"):
                value = payload.get(key)
                if isinstance(value, str) and value.strip():
                    return value.strip()
        return f"HTTP {response.status_code}"

    def _extract_text_response(self, data: Any) -> str:
        """Return the first non-empty answer string found in *data*.

        Checks the response-text keys used across AnythingLLM versions;
        returns "" when none is present.
        """
        if isinstance(data, dict):
            for key in ("textResponse", "response", "answer", "text", "message"):
                value = data.get(key)
                if isinstance(value, str) and value.strip():
                    return value.strip()
        return ""

    def _extract_sources(self, data: Any) -> list[dict[str, str]]:
        """Normalize up to six cited sources from *data*.

        Each entry is reduced to ``title``/``url``/``snippet`` strings,
        tolerating the differing key names AnythingLLM may emit.
        """
        if not isinstance(data, dict):
            return []
        raw_sources = data.get("sources", [])
        if not isinstance(raw_sources, list):
            return []
        sources: list[dict[str, str]] = []
        for item in raw_sources[:6]:
            if not isinstance(item, dict):
                continue
            sources.append(
                {
                    "title": str(item.get("title") or item.get("source") or item.get("url") or "").strip(),
                    "url": str(item.get("url") or "").strip(),
                    "snippet": str(item.get("text") or item.get("snippet") or item.get("description") or "").strip(),
                }
            )
        return sources
|
||||
Reference in New Issue
Block a user