🚀 Auto-deploy: BotVPS updated on 24/03/2026 21:36:58

2026-03-24 21:36:58 +00:00
parent c2273a13f0
commit 27d12ff9c4
4 changed files with 40 additions and 14 deletions


@@ -211,7 +211,13 @@ async def _call_gemini_async(model: str, prompt: str, system_prompt: str = None)
     if system_prompt:
         contents.insert(0, {"role": "model", "parts": [{"text": system_prompt}]})
-    payload = {"contents": contents}
+    payload = {
+        "contents": contents,
+        "generationConfig": {
+            "temperature": 0.7,
+            "maxOutputTokens": 4096
+        }
+    }
     try:
         async with httpx.AsyncClient() as client:
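For context, a minimal standalone sketch of sending a payload with this generationConfig block to the public Gemini generateContent REST endpoint. The helper name gemini_generate, the timeout value, and the GEMINI_API_KEY environment variable are illustrative assumptions, not part of this commit.

# Sketch only (not from this diff): Gemini generateContent with generationConfig,
# assuming httpx is installed and the key is read from GEMINI_API_KEY.
import os
import httpx

async def gemini_generate(model: str, prompt: str) -> str:
    url = (
        "https://generativelanguage.googleapis.com/v1beta/"
        f"models/{model}:generateContent"
    )
    payload = {
        "contents": [{"role": "user", "parts": [{"text": prompt}]}],
        "generationConfig": {"temperature": 0.7, "maxOutputTokens": 4096},
    }
    async with httpx.AsyncClient(timeout=60) as client:
        resp = await client.post(
            url,
            params={"key": os.environ["GEMINI_API_KEY"]},
            json=payload,
        )
        resp.raise_for_status()
        data = resp.json()
        # Return the first text part of the first candidate
        return data["candidates"][0]["content"]["parts"][0]["text"]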
@@ -232,7 +238,12 @@ async def _call_openai_async(model: str, prompt: str, system_prompt: str = None)
messages.append({"role": "system", "content": system_prompt})
messages.append({"role": "user", "content": prompt})
payload = {"model": model, "messages": messages, "temperature": 0.7}
payload = {
"model": model,
"messages": messages,
"temperature": 0.7,
"max_completion_tokens": 4096
}
headers = {"Authorization": f"Bearer {api_key}", "Content-Type": "application/json"}
try:
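max_completion_tokens is the newer Chat Completions field that supersedes max_tokens on current OpenAI models. A minimal standalone sketch of an equivalent request follows; the helper name openai_chat and the OPENAI_API_KEY environment variable are illustrative assumptions, not part of this commit.

# Sketch only (not from this diff): OpenAI Chat Completions with the same
# temperature / max_completion_tokens fields, key taken from OPENAI_API_KEY.
import os
import httpx

async def openai_chat(model: str, prompt: str, system_prompt: str | None = None) -> str:
    messages = []
    if system_prompt:
        messages.append({"role": "system", "content": system_prompt})
    messages.append({"role": "user", "content": prompt})
    payload = {
        "model": model,
        "messages": messages,
        "temperature": 0.7,
        "max_completion_tokens": 4096,
    }
    headers = {
        "Authorization": f"Bearer {os.environ['OPENAI_API_KEY']}",
        "Content-Type": "application/json",
    }
    async with httpx.AsyncClient(timeout=60) as client:
        resp = await client.post(
            "https://api.openai.com/v1/chat/completions",
            json=payload,
            headers=headers,
        )
        resp.raise_for_status()
        # Return the assistant message of the first choice
        return resp.json()["choices"][0]["message"]["content"]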
@@ -258,7 +269,8 @@ async def _call_anthropic_async(model: str, prompt: str, system_prompt: str = No
     payload = {
         "model": model,
         "max_tokens": 4096,
-        "messages": [{"role": "user", "content": prompt}]
+        "messages": [{"role": "user", "content": prompt}],
+        "temperature": 0.7
     }
     if system_prompt: payload["system"] = system_prompt
@@ -278,7 +290,10 @@ async def _call_ollama_async(model: str, prompt: str, system_prompt: str = None)
"model": model,
"prompt": prompt,
"stream": False,
"options": {"num_ctx": 4096}
"options": {
"num_ctx": 4096,
"temperature": 0.7
}
}
if system_prompt: payload["system"] = system_prompt