feat: backend servis iskeletini ve yönetim uçlarını ekle
This commit is contained in:
37
backend/app/llm/ollama_client.py
Normal file
37
backend/app/llm/ollama_client.py
Normal file
@@ -0,0 +1,37 @@
|
||||
import httpx
|
||||
from httpx import HTTPError
|
||||
|
||||
from app.models import OllamaStatus
|
||||
|
||||
class OllamaClient:
    """Thin async client for an Ollama server's HTTP API.

    Uses the ``/api/tags`` endpoint both as a liveness probe and to list
    the locally installed models.
    """

    def __init__(self, base_url: str) -> None:
        # Strip trailing slashes so URL building below never yields "//api/tags".
        self.base_url = base_url.rstrip("/")

    async def health(self) -> bool:
        """Return ``True`` if the server answers ``/api/tags`` with a 2xx status.

        Fix: transport failures (connection refused, DNS error, timeout) are
        now reported as ``False`` instead of propagating out of a method typed
        ``-> bool`` — matching the error-handling convention of :meth:`status`.
        """
        try:
            async with httpx.AsyncClient(timeout=5.0) as client:
                response = await client.get(f"{self.base_url}/api/tags")
                return response.is_success
        except HTTPError:
            # An unreachable server is an unhealthy server, not an exception.
            return False

    async def status(self, model: str) -> OllamaStatus:
        """Report server reachability and whether *model* is installed.

        Network errors never raise: an unreachable server is encoded as
        ``OllamaStatus(reachable=False, ...)`` with the exception text in
        ``message``.
        """
        try:
            async with httpx.AsyncClient(timeout=5.0) as client:
                response = await client.get(f"{self.base_url}/api/tags")
                # Promote non-2xx responses into HTTPError so one handler
                # covers both transport and status failures.
                response.raise_for_status()
        except HTTPError as exc:
            return OllamaStatus(
                reachable=False,
                base_url=self.base_url,
                model=model,
                message=f"Ollama unreachable: {exc}",
            )

        payload = response.json()
        # /api/tags returns {"models": [{"name": ...}, ...]}; keep only
        # entries that actually carry a non-empty name.
        installed_models = [item.get("name", "") for item in payload.get("models", []) if item.get("name")]
        has_model = model in installed_models
        return OllamaStatus(
            reachable=True,
            base_url=self.base_url,
            model=model,
            installed_models=installed_models,
            message="Model found." if has_model else "Ollama reachable but model is not installed.",
        )
|
||||
Reference in New Issue
Block a user