Add Ollama connection check and better error messages

This commit is contained in:
Marcos
2026-03-22 16:31:08 -03:00
parent 0084577a70
commit a74978da4a
3 changed files with 43 additions and 1 deletion

View File

@@ -314,6 +314,22 @@ def _call_anthropic(model: str, prompt: str, system_prompt: str = None) -> str:
# ----------------------------------------
# OLLAMA (LOCAL)
# ----------------------------------------
def check_ollama_connection() -> dict:
    """Probe the local Ollama service and report whether it is reachable.

    Returns:
        A dict that always carries "endpoint" (the probed base URL) and a
        "status" key: "ok" (with a "models" list of advertised model names),
        "error" (with an HTTP "code" or an exception "message"),
        "timeout", or "unreachable".
    """
    endpoint = LLM_PROVIDERS["ollama"]["endpoint"]
    try:
        # /api/tags is a cheap endpoint that also tells us which models exist.
        response = requests.get(f"{endpoint}/api/tags", timeout=10)
        if response.status_code != 200:
            return {"status": "error", "code": response.status_code, "endpoint": endpoint}
        tag_entries = response.json().get("models", [])
        model_names = [entry.get("name") for entry in tag_entries]
        return {"status": "ok", "models": model_names, "endpoint": endpoint}
    except requests.exceptions.Timeout:
        return {"status": "timeout", "endpoint": endpoint}
    except requests.exceptions.ConnectionError:
        return {"status": "unreachable", "endpoint": endpoint}
    except Exception as exc:
        # Catch-all is intentional: this probe must always return a status
        # dict rather than raise (e.g. on JSON decode errors).
        return {"status": "error", "message": str(exc), "endpoint": endpoint}
def _call_ollama(model: str, prompt: str, system_prompt: str = None) -> str:
"""Chama Ollama local."""
endpoint = LLM_PROVIDERS["ollama"]["endpoint"]
@@ -333,6 +349,10 @@ def _call_ollama(model: str, prompt: str, system_prompt: str = None) -> str:
if res.status_code == 200:
return res.json().get("response", "")
return f"Erro Ollama: {res.status_code} - {res.text}"
except requests.exceptions.Timeout:
return f"[TIMEOUT] Ollama não respondeu em 120s. Verifique se o serviço está rodando em {endpoint}"
except requests.exceptions.ConnectionError:
return f"[CONNECTION ERROR] Não conseguiu conectar ao Ollama em {endpoint}. Verifique se o container Ollama está na mesma rede Docker."
except Exception as e:
return f"Erro Ollama: {str(e)}"