Fix Ollama endpoint: use http://ollama:11434

Author: Marcos
Date:   2026-03-22 16:40:27 -03:00
parent  a74978da4a
commit  2cc4ed0d18

2 changed files with 2 additions and 2 deletions


@@ -23,7 +23,7 @@ def get_llm_response(prompt: str, provider: str, cfg: dict) -> str:
         return f"Erro de Conexão Gemini: {str(e)}"
     elif provider == "ollama":
-        ollama_host = os.getenv("OLLAMA_HOST", "http://ollama-lw4s8g4gc8gss4gkc4gg0wk4:11434")
+        ollama_host = os.getenv("OLLAMA_HOST", "http://ollama:11434")
         try:
             res = requests.post(f"{ollama_host}/api/generate", json={
                 "model": os.getenv("OLLAMA_MODEL", "qwen2.5-coder:1.5b"),