From 17dcb9d178dd786dba4192ea3e590d49b10a7cc5 Mon Sep 17 00:00:00 2001
From: Marcos
Date: Sun, 22 Mar 2026 16:51:21 -0300
Subject: [PATCH] Increase Ollama timeout to 180s and add num_ctx

---
 ai_agent.py      | 8 +++++---
 llm_providers.py | 7 ++++---
 2 files changed, 9 insertions(+), 6 deletions(-)

diff --git a/ai_agent.py b/ai_agent.py
index cbb7f71..eae44ae 100644
--- a/ai_agent.py
+++ b/ai_agent.py
@@ -24,12 +24,14 @@ def get_llm_response(prompt: str, provider: str, cfg: dict) -> str:
 
     elif provider == "ollama":
         ollama_host = os.getenv("OLLAMA_HOST", "http://ollama:11434")
+        model = os.getenv("OLLAMA_MODEL", "qwen2.5-coder:1.5b")
         try:
             res = requests.post(f"{ollama_host}/api/generate", json={
-                "model": os.getenv("OLLAMA_MODEL", "qwen2.5-coder:1.5b"),
+                "model": model,
                 "prompt": prompt,
-                "stream": False
-            }, timeout=30)
+                "stream": False,
+                "options": {"num_ctx": 4096}
+            }, timeout=180)
             if res.status_code == 200:
                 return res.json().get("response", "")
             return f"Erro Ollama (Status {res.status_code}): {res.text}"
diff --git a/llm_providers.py b/llm_providers.py
index a1fb928..ff64a72 100644
--- a/llm_providers.py
+++ b/llm_providers.py
@@ -337,20 +337,21 @@ def _call_ollama(model: str, prompt: str, system_prompt: str = None) -> str:
     payload = {
         "model": model,
         "prompt": prompt,
-        "stream": False
+        "stream": False,
+        "options": {"num_ctx": 4096}
     }
     if system_prompt:
         payload["system"] = system_prompt
     try:
-        res = requests.post(f"{endpoint}/api/generate", json=payload, timeout=120)
+        res = requests.post(f"{endpoint}/api/generate", json=payload, timeout=180)
         if res.status_code == 200:
             return res.json().get("response", "")
         return f"Erro Ollama: {res.status_code} - {res.text}"
     except requests.exceptions.Timeout:
-        return f"[TIMEOUT] Ollama não respondeu em 120s. Verifique se o serviço está rodando em {endpoint}"
+        return f"[TIMEOUT] Ollama não respondeu em 180s. Verifique se o modelo está carregado em {endpoint}"
     except requests.exceptions.ConnectionError:
         return f"[CONNECTION ERROR] Não conseguiu conectar ao Ollama em {endpoint}. Verifique se o container Ollama está na mesma rede Docker."
     except Exception as e:
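
Reviewer note (not part of the patch): the sketch below is a minimal standalone smoke test for the patched settings, assuming the same env-var defaults the patch uses (OLLAMA_HOST falling back to http://ollama:11434, OLLAMA_MODEL to qwen2.5-coder:1.5b); the script itself, its prompt, and the `smoke_test` name are illustrative, not code from this repo.

```python
import os
import requests

# Assumed defaults mirror the patch: OLLAMA_HOST and OLLAMA_MODEL fall back
# to the Docker-network hostname and model used in ai_agent.py.
OLLAMA_HOST = os.getenv("OLLAMA_HOST", "http://ollama:11434")
OLLAMA_MODEL = os.getenv("OLLAMA_MODEL", "qwen2.5-coder:1.5b")


def smoke_test() -> None:
    """Send one non-streaming generation request with the patched settings."""
    try:
        res = requests.post(
            f"{OLLAMA_HOST}/api/generate",
            json={
                "model": OLLAMA_MODEL,
                "prompt": "Reply with the single word: ok",
                "stream": False,
                # num_ctx sets the context window; 4096 matches the patch.
                "options": {"num_ctx": 4096},
            },
            timeout=180,  # the first call may include model load time
        )
        res.raise_for_status()
        body = res.json()
        print("response:", body.get("response", "").strip())
        # prompt_eval_count is returned by /api/generate and helps confirm
        # the request actually ran through the model.
        print("prompt_eval_count:", body.get("prompt_eval_count"))
    except requests.exceptions.Timeout:
        print(f"Timed out after 180s; is the model pulled on {OLLAMA_HOST}?")


if __name__ == "__main__":
    smoke_test()
```

The 180s timeout mainly covers cold starts, where Ollama loads the model into memory before generating the first token; once the model is resident, responses from a 1.5B model should return well inside the old 30s/120s limits.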