From b787cb7baade815835414d22986baaa3ece13499 Mon Sep 17 00:00:00 2001
From: Marcos
Date: Sun, 22 Mar 2026 11:26:57 -0300
Subject: [PATCH] Fix: Update Gemini model to stable gemini-1.5-flash

---
 ai_agent.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/ai_agent.py b/ai_agent.py
index 5b783f8..f72e1f4 100644
--- a/ai_agent.py
+++ b/ai_agent.py
@@ -9,7 +9,7 @@ def get_llm_response(prompt: str, provider: str, cfg: dict) -> str:
     """Invoca o provedor de LLM configurado."""
     if provider == "gemini":
         api_key = cfg.get("gemini_api_key") or os.getenv("GEMINI_API_KEY")
-        url = f"https://generativelanguage.googleapis.com/v1beta/models/gemini-2.0-flash:generateContent?key={api_key}"
+        url = f"https://generativelanguage.googleapis.com/v1beta/models/gemini-1.5-flash:generateContent?key={api_key}"
         payload = {"contents": [{"parts": [{"text": prompt}]}]}
         res = requests.post(url, json=payload)
         if res.status_code == 200: