diff --git a/llm_providers.py b/llm_providers.py
index 8dc957b..c271dd0 100644
--- a/llm_providers.py
+++ b/llm_providers.py
@@ -190,6 +190,12 @@ async def _call_gemini_async(model: str, prompt: str, system_prompt: str = None)
 
         payload = {
             "contents": contents,
+            "safetySettings": [
+                {"category": "HARM_CATEGORY_HARASSMENT", "threshold": "BLOCK_NONE"},
+                {"category": "HARM_CATEGORY_HATE_SPEECH", "threshold": "BLOCK_NONE"},
+                {"category": "HARM_CATEGORY_SEXUALLY_EXPLICIT", "threshold": "BLOCK_NONE"},
+                {"category": "HARM_CATEGORY_DANGEROUS_CONTENT", "threshold": "BLOCK_NONE"}
+            ],
             "generationConfig": {
                 "temperature": 0.7,
                 "maxOutputTokens": 4096
@@ -200,7 +206,14 @@ async def _call_gemini_async(model: str, prompt: str, system_prompt: str = None)
         async with httpx.AsyncClient() as client:
             res = await client.post(url, json=payload, timeout=60)
             if res.status_code == 200:
-                return res.json()["candidates"][0]["content"]["parts"][0]["text"]
+                data = res.json()
+                try:
+                    candidate = data["candidates"][0]
+                    return candidate["content"]["parts"][0]["text"]
+                except (KeyError, IndexError):
+                    # Model might have blocked it due to safety or empty response
+                    finish_reason = (data.get("candidates") or [{}])[0].get("finishReason", "Unknown")
+                    return f"Erro Gemini (Parsing): Resposta sem formato esperado. Motivo/FinishReason: {finish_reason}. Raw: {json.dumps(data)}"
             return f"Erro Gemini: {res.status_code} - {res.text}"
     except Exception as e:
         return f"Erro Gemini: {str(e)}"