🚀 Auto-deploy: BotVPS atualizado em 16/04/2026 17:33:37
This commit is contained in:
@@ -31,6 +31,13 @@ LLM_PROVIDERS = {
|
||||
"default": "claude-3-5-sonnet-20241022",
|
||||
"endpoint": "https://api.anthropic.com/v1"
|
||||
},
|
||||
"openrouter": {
|
||||
"name": "OpenRouter",
|
||||
"type": "api",
|
||||
"models": ["qwen/qwen-2.5-72b-instruct", "google/gemini-2.0-flash-001"],
|
||||
"default": "qwen/qwen-2.5-72b-instruct",
|
||||
"endpoint": "https://openrouter.ai/api/v1"
|
||||
},
|
||||
"ollama": {
|
||||
"name": "Ollama (Local)",
|
||||
"type": "local",
|
||||
@@ -48,7 +55,7 @@ def get_orchestrator_config() -> dict:
|
||||
"""Retorna config do orchestrator."""
|
||||
cfg = get_config()
|
||||
return cfg.get("orchestrator", {
|
||||
"planner": {"provider": "gemini", "model": "gemini-2.5-flash"},
|
||||
"planner": {"provider": "openrouter", "model": "qwen/qwen-2.5-72b-instruct"},
|
||||
"executor": {"provider": "ollama", "model": "llama3.2:1b"}
|
||||
})
|
||||
|
||||
@@ -108,12 +115,20 @@ def get_api_key(provider: str) -> str:
|
||||
env_vars = {
|
||||
"openai": "OPENAI_API_KEY",
|
||||
"anthropic": "ANTHROPIC_API_KEY",
|
||||
"gemini": "GEMINI_API_KEY"
|
||||
"gemini": "GEMINI_API_KEY",
|
||||
"openrouter": "OPENROUTER_API_KEY"
|
||||
}
|
||||
|
||||
# 3.1 Busca específica do provider
|
||||
if provider in env_vars and os.getenv(env_vars[provider]):
|
||||
return os.getenv(env_vars[provider])
|
||||
|
||||
# 3.2 Busca DINÂMICA (Chaves do Vault do Orquestrador)
|
||||
# Procura qualquer variável que comece com o nome do provedor (ex: openrouter_qwen)
|
||||
for key, value in os.environ.items():
|
||||
if key.lower().startswith(f"{provider}_"):
|
||||
return value
|
||||
|
||||
# 4. Fallback ÚLTIMO RECURSO (Segurança Antigravity)
|
||||
if provider == "gemini":
|
||||
return "AIzaSyA-YHI7CDp7bAZz-2U9IRjzMrmzhAM7zkA"
|
||||
@@ -176,9 +191,42 @@ async def call_llm(provider: str, model: str, prompt: str, system_prompt: str =
|
||||
return await _call_anthropic_async(model, prompt, system_prompt)
|
||||
elif provider == "ollama":
|
||||
return await _call_ollama_async(model, prompt, system_prompt)
|
||||
elif provider == "openrouter":
|
||||
return await _call_openrouter_async(model, prompt, system_prompt)
|
||||
else:
|
||||
return f"Erro: Provider '{provider}' não suportado."
|
||||
|
||||
async def _call_openrouter_async(model: str, prompt: str, system_prompt: str = None) -> str:
    """Call the OpenRouter chat-completions endpoint (OpenAI-compatible) via httpx (async).

    On HTTP 200 returns the assistant message text; on any other status or
    on an exception returns an error string prefixed with "Erro OpenRouter:"
    (this file's convention is to return error strings rather than raise).

    Args:
        model: OpenRouter model slug (e.g. "qwen/qwen-2.5-72b-instruct").
        prompt: User prompt sent as the final "user" message.
        system_prompt: Optional system message prepended to the conversation.
    """
    api_key = get_api_key("openrouter")
    url = "https://openrouter.ai/api/v1/chat/completions"

    # Assemble the OpenAI-style message list; the system message is optional.
    messages = [{"role": "system", "content": system_prompt}] if system_prompt else []
    messages.append({"role": "user", "content": prompt})

    payload = {
        "model": model,
        "messages": messages,
        "temperature": 0.7,
    }
    # OpenRouter expects a referer + title identifying the calling app.
    headers = {
        "Authorization": f"Bearer {api_key}",
        "HTTP-Referer": "https://botvps.cloud",  # Requisito OpenRouter
        "X-Title": "BotVPS Factory",
        "Content-Type": "application/json",
    }

    try:
        async with httpx.AsyncClient() as client:
            response = await client.post(url, json=payload, headers=headers, timeout=120)
            if response.status_code != 200:
                return f"Erro OpenRouter: {response.status_code} - {response.text}"
            return response.json()["choices"][0]["message"]["content"]
    except Exception as e:
        return f"Erro OpenRouter: {str(e)}"
|
||||
|
||||
async def _call_gemini_async(model: str, prompt: str, system_prompt: str = None) -> str:
|
||||
"""Chama API do Google Gemini via httpx (async)."""
|
||||
api_key = get_api_key("gemini")
|
||||
|
||||
Reference in New Issue
Block a user