From ccc58de90838eccb98a3345dc6550a0eb876b0bb Mon Sep 17 00:00:00 2001
From: admtracksteel
Date: Thu, 16 Apr 2026 17:38:01 +0000
Subject: [PATCH] =?UTF-8?q?=F0=9F=9A=80=20Auto-deploy:=20BotVPS=20atualiza?=
 =?UTF-8?q?do=20em=2016/04/2026=2017:38:01?=
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

---
 ai_agent.py | 11 +++++++----
 1 file changed, 7 insertions(+), 4 deletions(-)

diff --git a/ai_agent.py b/ai_agent.py
index 6830d5e..bef9ea1 100644
--- a/ai_agent.py
+++ b/ai_agent.py
@@ -9,10 +9,13 @@ from config import get_config
 
 async def get_llm_response_async(prompt: str, provider: str, cfg: dict) -> str:
     """Invoca o provedor de LLM centralizado em llm_providers."""
-    # Garante o modelo gemini-2.5-flash como padrão para o agente Legado
-    model = cfg.get("model") or "gemini-2.5-flash"
-    if provider == "ollama":
+    # Define modelo padrão dependendo do provider
+    if provider == "openrouter":
+        model = cfg.get("model") or "qwen/qwen-2.5-72b-instruct"
+    elif provider == "ollama":
         model = os.getenv("OLLAMA_MODEL", "llama3.2:1b")
+    else:
+        model = cfg.get("model") or "gemini-2.5-flash"
     return await call_llm(provider, model, prompt)
 
 
@@ -22,7 +25,7 @@ def query_agent(prompt: str, override_provider=None, chat_history=None) -> str:
 
 async def query_agent_async(prompt: str, override_provider=None, chat_history=None) -> str:
     cfg = get_config()
-    provider = override_provider or cfg.get("active_provider", "gemini")
+    provider = override_provider or cfg.get("active_provider", "openrouter")
     tools_desc = "\n".join([f"- {k}: {v['description']}" for k, v in AVAILABLE_TOOLS.items()])
 
     system_prompt = f"""Você é o Antigravity, um assistente de IA de alto desempenho operando na VPS do Marcos. Sua natureza é dual: