From b8a23f6e18731a5ea6296e0543691c4362b118c7 Mon Sep 17 00:00:00 2001
From: admtracksteel
Date: Wed, 29 Apr 2026 00:17:12 +0000
Subject: [PATCH] =?UTF-8?q?=F0=9F=9A=80=20Auto-deploy:=20BotVPS=20atualiza?=
 =?UTF-8?q?do=20em=2029/04/2026=2000:17:12?=
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

---
 llm_providers.py | 20 ++++++++++++++++----
 1 file changed, 16 insertions(+), 4 deletions(-)

diff --git a/llm_providers.py b/llm_providers.py
index 18ae663..5db5cb2 100644
--- a/llm_providers.py
+++ b/llm_providers.py
@@ -34,8 +34,8 @@ LLM_PROVIDERS = {
     "openrouter": {
         "name": "OpenRouter",
         "type": "api",
-        "models": ["inclusionai/ling-2.6-flash:free", "google/gemini-2.0-flash-001", "deepseek/deepseek-chat"],
-        "default": "inclusionai/ling-2.6-flash:free",
+        "models": ["qwen/qwen-2.5-72b-instruct", "inclusionai/ling-2.6-flash:free", "google/gemini-2.0-flash-001"],
+        "default": "qwen/qwen-2.5-72b-instruct",
         "endpoint": "https://openrouter.ai/api/v1"
     },
     "ollama": {
@@ -55,7 +55,7 @@ def get_orchestrator_config() -> dict:
     """Retorna config do orchestrator."""
     cfg = get_config()
     return cfg.get("orchestrator", {
-        "planner": {"provider": "openrouter", "model": "inclusionai/ling-2.6-flash:free"},
+        "planner": {"provider": "openrouter", "model": "qwen/qwen-2.5-72b-instruct"},
         "executor": {"provider": "ollama", "model": "llama3.2:1b"}
     })
 
@@ -382,7 +382,19 @@ def get_executor_llm() -> tuple:
 
 async def call_planner_async(prompt: str, system_prompt: str = None) -> str:
     provider, model = get_planner_llm()
-    return await call_llm(provider, model, prompt, system_prompt)
+    try:
+        response = await call_llm(provider, model, prompt, system_prompt)
+        # Se a resposta indicar um erro de API, disparamos o fallback
+        if response.startswith("Erro OpenRouter"):
+            raise Exception(response)
+        return response
+    except Exception as e:
+        # Lógica de FALLBACK: Se o Qwen falhar, tenta o Ling-2.6-flash
+        if provider == "openrouter" and model == "qwen/qwen-2.5-72b-instruct":
+            backup_model = "inclusionai/ling-2.6-flash:free"
+            print(f"⚠️ [FALLBACK] Falha no Qwen ({str(e)}). Tentando {backup_model}...")
+            return await call_llm("openrouter", backup_model, prompt, system_prompt)
+        return f"Erro Crítico no Planner: {str(e)}"
 
 async def call_executor_async(prompt: str, system_prompt: str = None) -> str:
     provider, model = get_executor_llm()