feat: support OpenWebUI as Ollama gateway

- Connect via OpenWebUI API at https://llm.reifonas.cloud/api
- Use /api/v1/chat/completions format for OpenWebUI
- Keep native Ollama format as fallback
- Auto-detect models from both endpoints
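
For context, a minimal, self-contained sketch (not the project's actual code) of the two request shapes these bullets describe. The `sendChat` helper and its parameters are hypothetical names for illustration; the OpenWebUI branch follows the `/api/v1/chat/completions` path named above, detection mirrors the `llm.reifonas.cloud` check in the diff below, and the fallback uses Ollama's standard native chat API.

```ts
// Hypothetical helper illustrating the two request formats this commit targets.

type ChatMessage = { role: 'system' | 'user' | 'assistant'; content: string };

async function sendChat(
  endpoint: string,
  model: string,
  messages: ChatMessage[],
  apiKey?: string,
): Promise<string> {
  // Same detection rule as the diff below: treat the hosted gateway as OpenWebUI.
  const useOpenWebUI = endpoint.includes('llm.reifonas.cloud');

  if (useOpenWebUI) {
    // OpenAI-style route, per the /api/v1/chat/completions path in the commit message.
    const res = await fetch(`${endpoint}/api/v1/chat/completions`, {
      method: 'POST',
      headers: {
        'Content-Type': 'application/json',
        ...(apiKey ? { Authorization: `Bearer ${apiKey}` } : {}),
      },
      body: JSON.stringify({ model, messages }),
    });
    const data = await res.json();
    return data.choices?.[0]?.message?.content ?? '';
  }

  // Native Ollama chat API, kept as the fallback.
  const res = await fetch(`${endpoint}/api/chat`, {
    method: 'POST',
    headers: { 'Content-Type': 'application/json' },
    body: JSON.stringify({ model, messages, stream: false }),
  });
  const data = await res.json();
  return data.message?.content ?? '';
}
```

Calling `sendChat('https://llm.reifonas.cloud', 'llama3', [{ role: 'user', content: 'Olá' }])` would go through the OpenAI-style route; any other endpoint falls back to the native Ollama format, matching the fallback behaviour the commit describes.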
2026-04-04 20:22:27 +00:00
parent 075f6ae0bc
commit f41c5eccd2
3 changed files with 25 additions and 8 deletions


@@ -66,8 +66,12 @@ const testOllama = async (endpoint?: string): Promise<TestResult> => {
     }
   }
+  // Normalize endpoint - add /api for OpenWebUI
+  const useOpenWebUI = ollamaEndpoint.includes('llm.reifonas.cloud');
+  const apiEndpoint = useOpenWebUI ? `${ollamaEndpoint}/api` : ollamaEndpoint;
   try {
-    const response = await fetch(`${ollamaEndpoint}/api/tags`);
+    const response = await fetch(`${apiEndpoint}/tags`);
     if (!response.ok) {
       return { success: false, error: 'Não foi possível conectar ao Ollama. Verifique o endereço.' };