feat: support OpenWebUI as Ollama gateway

- Connect via OpenWebUI API at https://llm.reifonas.cloud/api
- Use the /api/v1/chat/completions (OpenAI-compatible) format for OpenWebUI
- Keep the native Ollama request format as a fallback
- Auto-detect available models from both endpoint types
This commit is contained in:
2026-04-04 20:22:27 +00:00
parent 075f6ae0bc
commit f41c5eccd2
3 changed files with 25 additions and 8 deletions

View File

@@ -365,12 +365,21 @@ export const analyzeWithOllama = async (file: File, endpoint: string, model: str
reader.readAsDataURL(file); reader.readAsDataURL(file);
}); });
const url = `${endpoint}/api/chat`; let url = `${endpoint}/api/chat`;
// Check if using OpenWebUI (has /api in path but not direct Ollama)
const useOpenWebUI = endpoint.includes('/api') || endpoint.includes('llm.reifonas.cloud');
if (useOpenWebUI) {
// Use OpenWebUI API format
url = `${endpoint}/api/v1/chat/completions`;
}
const response = await fetch(url, { const response = await fetch(url, {
method: 'POST', method: 'POST',
headers: { headers: {
'Content-Type': 'application/json', 'Content-Type': 'application/json',
...(useOpenWebUI ? { 'Authorization': 'Bearer no-key-required' } : {})
}, },
body: JSON.stringify({ body: JSON.stringify({
model, model,
@@ -379,8 +388,8 @@ export const analyzeWithOllama = async (file: File, endpoint: string, model: str
role: 'user', role: 'user',
content: [ content: [
{ {
type: 'image', type: 'image_url',
data: base64Data image_url: { url: `data:${file.type};base64,${base64Data}` }
}, },
{ {
type: 'text', type: 'text',
@@ -389,7 +398,7 @@ export const analyzeWithOllama = async (file: File, endpoint: string, model: str
] ]
} }
], ],
format: 'json', ...(useOpenWebUI ? {} : { format: 'json' }),
options: { options: {
temperature: 0.1, temperature: 0.1,
num_predict: 4096 num_predict: 4096
@@ -399,14 +408,17 @@ export const analyzeWithOllama = async (file: File, endpoint: string, model: str
if (!response.ok) { if (!response.ok) {
const error = await response.text(); const error = await response.text();
throw new Error(`Erro do Ollama: ${error}`); throw new Error(`Erro do Ollama/OpenWebUI: ${error}`);
} }
const data = await response.json(); const data = await response.json();
const content = data.message?.content;
// OpenWebUI format: data.choices[0].message.content
// Ollama native format: data.message.content
const content = data.choices?.[0]?.message?.content || data.message?.content;
if (!content) { if (!content) {
throw new Error("Resposta vazia do Ollama"); throw new Error("Resposta vazia do Ollama/OpenWebUI");
} }
return cleanAndParseJson(content) as ReportData; return cleanAndParseJson(content) as ReportData;

View File

@@ -66,8 +66,12 @@ const testOllama = async (endpoint?: string): Promise<TestResult> => {
} }
} }
// Normalize endpoint - add /api for OpenWebUI
const useOpenWebUI = ollamaEndpoint.includes('llm.reifonas.cloud');
const apiEndpoint = useOpenWebUI ? `${ollamaEndpoint}/api` : ollamaEndpoint;
try { try {
const response = await fetch(`${ollamaEndpoint}/api/tags`); const response = await fetch(`${apiEndpoint}/tags`);
if (!response.ok) { if (!response.ok) {
return { success: false, error: 'Não foi possível conectar ao Ollama. Verifique o endereço.' }; return { success: false, error: 'Não foi possível conectar ao Ollama. Verifique o endereço.' };

View File

@@ -7,6 +7,7 @@ export const OLLAMA_AUTO_DETECT_URLS = [
'http://127.0.0.1:11434', 'http://127.0.0.1:11434',
'http://192.168.1.100:11434', 'http://192.168.1.100:11434',
'http://10.0.0.1:11434', 'http://10.0.0.1:11434',
'http://10.0.1.1:11434',
'https://llm.reifonas.cloud', 'https://llm.reifonas.cloud',
'http://ollama:11434', 'http://ollama:11434',
'http://host.docker.internal:11434', 'http://host.docker.internal:11434',