Files
SteelCheck/services/apiTestService.ts
admtracksteel f41c5eccd2 feat: support OpenWebUI as Ollama gateway
- Connect via OpenWebUI API at https://llm.reifonas.cloud/api
- Use /api/v1/chat/completions format for OpenWebUI
- Keep native Ollama format as fallback
- Auto-detect models from both endpoints
2026-04-04 20:22:27 +00:00

232 lines
6.8 KiB
TypeScript

import { PROVIDERS, type AIProvider } from '../types/providers';
import { OLLAMA_AUTO_DETECT_URLS } from '../types/providers';
/**
 * A selectable AI model, as surfaced to the model picker by the
 * provider tests below.
 */
export interface ModelInfo {
  /** Provider-specific model identifier, sent verbatim in API requests. */
  id: string;
  /** Human-readable label shown to the user. */
  name: string;
}
/** Outcome of a provider connectivity/credential test. */
interface TestResult {
  /** True when the credentials (and endpoint, if any) were accepted. */
  success: boolean;
  /** Models detected on success (or a hard-coded default list). */
  models?: ModelInfo[];
  /** Human-readable (pt-BR) error message when the test failed. */
  error?: string;
  /** Resolved base URL — only set by the Ollama test (incl. auto-detection). */
  endpoint?: string;
}
/**
 * Validates the given credentials against the selected AI provider and
 * reports the models available through it.
 *
 * @param provider - Which AI backend to probe.
 * @param apiKey   - Credential for the provider (unused by Ollama).
 * @param endpoint - Optional base URL (required for Azure; optional for
 *                   Ollama, which auto-detects when omitted).
 * @returns A TestResult with success flag, detected models and/or error text.
 */
export const testApiKey = async (provider: AIProvider, apiKey: string, endpoint?: string): Promise<TestResult> => {
  try {
    if (provider === 'gemini') return await testGemini(apiKey);
    if (provider === 'openai') return await testOpenAI(apiKey);
    if (provider === 'anthropic') return await testAnthropic(apiKey);
    if (provider === 'azure') return await testAzure(apiKey, endpoint);
    if (provider === 'ollama') return await testOllama(endpoint);
    return { success: false, error: 'Provedor não suportado' };
  } catch (error) {
    // Per-provider helpers may rethrow unexpected failures; flatten them
    // into the uniform TestResult shape here.
    const message = error instanceof Error ? error.message : 'Erro desconhecido ao testar API';
    return { success: false, error: message };
  }
};
/**
 * Probes each well-known Ollama address and returns the first one whose
 * /api/tags endpoint answers with an OK status, or null when none respond.
 */
const findOllamaEndpoint = async (): Promise<string | null> => {
  for (const baseUrl of OLLAMA_AUTO_DETECT_URLS) {
    // Treat network errors and timeouts the same as a non-OK response:
    // just move on to the next candidate address.
    const reachable = await fetch(`${baseUrl}/api/tags`, {
      method: 'GET',
      signal: AbortSignal.timeout(3000)
    })
      .then((res) => res.ok)
      .catch(() => false);
    if (reachable) {
      return baseUrl;
    }
  }
  return null;
};
/**
 * Tests connectivity to an Ollama server (or an OpenWebUI gateway in front
 * of it) and lists installed models, preferring vision-capable ones.
 *
 * @param endpoint - Base URL of the server; when omitted, well-known local
 *                   addresses are probed via findOllamaEndpoint().
 * @returns TestResult carrying the resolved endpoint and detected models.
 */
const testOllama = async (endpoint?: string): Promise<TestResult> => {
  let ollamaEndpoint = endpoint;
  if (!ollamaEndpoint) {
    const foundEndpoint = await findOllamaEndpoint();
    if (foundEndpoint) {
      ollamaEndpoint = foundEndpoint;
    } else {
      return { success: false, error: 'Ollama não encontrado. Configure o endereço manualmente.' };
    }
  }
  // Strip trailing slashes so the path joins below never produce '//'.
  ollamaEndpoint = ollamaEndpoint.replace(/\/+$/, '');
  // OpenWebUI proxies Ollama under an /api prefix; append it unless the
  // user already included it in the configured endpoint.
  const useOpenWebUI = ollamaEndpoint.includes('llm.reifonas.cloud');
  const apiEndpoint = useOpenWebUI && !ollamaEndpoint.endsWith('/api')
    ? `${ollamaEndpoint}/api`
    : ollamaEndpoint;
  try {
    // Bounded wait so an unreachable manual endpoint fails fast instead of
    // hanging the UI (auto-detection already uses a 3s timeout).
    const response = await fetch(`${apiEndpoint}/tags`, {
      signal: AbortSignal.timeout(5000)
    });
    if (!response.ok) {
      return { success: false, error: 'Não foi possível conectar ao Ollama. Verifique o endereço.' };
    }
    const data = await response.json();
    const models = data.models?.map((m: any) => ({
      id: m.name,
      name: m.name
    })) || [];
    // Heuristic: surface only models likely to support image input when any
    // are present; otherwise fall back to the full list.
    const visionModels = models.filter((m: ModelInfo) =>
      m.id.includes('vision') ||
      m.id.includes('llama3') ||
      m.id.includes('qwen2') ||
      m.id.includes('moondream')
    );
    if (visionModels.length > 0) {
      return { success: true, models: visionModels, endpoint: ollamaEndpoint };
    }
    return {
      success: true,
      models: models.length > 0 ? models : [{ id: 'llama3.2', name: 'Llama 3.2 (Padrão)' }],
      endpoint: ollamaEndpoint
    };
  } catch (error: any) {
    return { success: false, error: 'Não foi possível conectar ao Ollama. Verifique o endereço e certifique-se que o Ollama está rodando.' };
  }
};
/**
 * Validates a Google Gemini API key by issuing a minimal generation request.
 * On success, returns the curated list of Gemini models offered in the UI.
 */
const testGemini = async (apiKey: string): Promise<TestResult> => {
  const { GoogleGenAI } = await import('@google/genai');
  try {
    const client = new GoogleGenAI({ apiKey });
    const response = await client.models.generateContent({
      model: 'gemini-2.5-flash',
      contents: 'Respond with exactly: {"status": "ok"}',
      config: { temperature: 0 }
    });
    const answeredOk = Boolean(response.text?.includes('ok'));
    if (!answeredOk) {
      return { success: false, error: 'Resposta inválida do Gemini' };
    }
    return {
      success: true,
      models: [
        { id: 'gemini-2.0-flash', name: 'Gemini 2.0 Flash (Rápido)' },
        { id: 'gemini-2.5-flash', name: 'Gemini 2.5 Flash (Equilibrado)' },
        { id: 'gemini-2.5-pro', name: 'Gemini 2.5 Pro (Mais potente)' }
      ]
    };
  } catch (error: any) {
    // Friendly message for credential problems; rethrow anything else so
    // the caller's generic handler reports it.
    if (error.message?.includes('API key')) {
      return { success: false, error: 'Chave de API inválida' };
    }
    throw error;
  }
};
/**
 * Validates an OpenAI API key by listing models, and returns the GPT-4
 * family (vision-capable) models visible to the account.
 */
const testOpenAI = async (apiKey: string): Promise<TestResult> => {
  try {
    const response = await fetch('https://api.openai.com/v1/models', {
      headers: { 'Authorization': `Bearer ${apiKey}` }
    });
    if (!response.ok) {
      const error = await response.json();
      return { success: false, error: error.error?.message || 'Chave de API inválida' };
    }
    const data = await response.json();
    // 'gpt-4' is a substring of 'gpt-4o' and 'gpt-4-turbo', so a single
    // check covers the whole family. Guard against a missing 'data' array
    // so a malformed body doesn't throw.
    const visionModels = (data.data ?? [])
      .filter((m: any) => m.id.includes('gpt-4'))
      .map((m: any) => ({ id: m.id, name: m.id }));
    if (visionModels.length === 0) {
      // No GPT-4 models visible; offer a sensible default.
      return {
        success: true,
        models: [{ id: 'gpt-4o', name: 'GPT-4o (Padrão)' }]
      };
    }
    return { success: true, models: visionModels };
  } catch (error: any) {
    return { success: false, error: error.message || 'Erro ao conectar com OpenAI' };
  }
};
/**
 * Validates an Anthropic API key by sending a tiny (10-token) message
 * request. On success, returns the fixed Claude 3 model list used by the UI.
 */
const testAnthropic = async (apiKey: string): Promise<TestResult> => {
  const requestBody = JSON.stringify({
    model: 'claude-3-haiku-20240307',
    max_tokens: 10,
    messages: [{ role: 'user', content: 'Hi' }]
  });
  try {
    const response = await fetch('https://api.anthropic.com/v1/messages', {
      method: 'POST',
      headers: {
        'x-api-key': apiKey,
        'anthropic-version': '2023-06-01',
        'Content-Type': 'application/json',
      },
      body: requestBody
    });
    if (response.ok) {
      return {
        success: true,
        models: [
          { id: 'claude-3-opus-20240229', name: 'Claude 3 Opus (Mais potente)' },
          { id: 'claude-3-sonnet-20240229', name: 'Claude 3 Sonnet (Equilibrado)' },
          { id: 'claude-3-haiku-20240307', name: 'Claude 3 Haiku (Rápido)' }
        ]
      };
    }
    const error = await response.json();
    return { success: false, error: error.error?.message || 'Chave de API inválida' };
  } catch (error: any) {
    return { success: false, error: error.message || 'Erro ao conectar com Anthropic' };
  }
};
/**
 * Validates an Azure OpenAI key/endpoint pair by listing deployments.
 *
 * @param apiKey   - Azure OpenAI API key.
 * @param endpoint - Resource base URL. Optional in the signature so the
 *                   call in testApiKey (which holds `endpoint?: string`)
 *                   type-checks; required at runtime and rejected below.
 */
const testAzure = async (apiKey: string, endpoint?: string): Promise<TestResult> => {
  if (!endpoint) {
    return { success: false, error: 'Endpoint do Azure é obrigatório' };
  }
  try {
    // Trim trailing slashes so the joined URL never contains '//'.
    const base = endpoint.replace(/\/+$/, '');
    const url = `${base}/openai/deployments?api-version=2024-02-15-preview`;
    const response = await fetch(url, {
      headers: { 'api-key': apiKey }
    });
    if (!response.ok) {
      return { success: false, error: 'Endpoint ou chave inválidos' };
    }
    const data = await response.json();
    const deployments = data.data?.map((d: any) => ({
      id: d.id,
      name: d.id
    })) || [];
    if (deployments.length === 0) {
      // No deployments visible to this key; offer a sensible default.
      return {
        success: true,
        models: [{ id: 'gpt-4', name: 'GPT-4 (Padrão)' }]
      };
    }
    return { success: true, models: deployments };
  } catch (error: any) {
    return { success: false, error: error.message || 'Erro ao conectar com Azure' };
  }
};