feat: support OpenWebUI as Ollama gateway

- Connect via OpenWebUI API at https://llm.reifonas.cloud/api
- Use /api/v1/chat/completions format for OpenWebUI
- Keep native Ollama format as fallback
- Auto-detect models from both endpoints (the two request/response formats are sketched below)
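
As a quick reference, a minimal TypeScript sketch of the two formats this commit bridges (routes and shapes taken from the diff below; extractContent is an illustrative helper, not part of the codebase):

// Request routes, per this commit:
//   OpenWebUI gateway: `${endpoint}/api/v1/chat/completions` (OpenAI-style payload, Bearer auth header)
//   Native Ollama:     `${endpoint}/api/chat` (native payload with format: 'json')
// Response shapes, read in a gateway-agnostic way:
//   OpenWebUI: { choices: [ { message: { content } } ] }
//   Ollama:    { message: { content } }
const extractContent = (data: any): string | undefined =>
  data.choices?.[0]?.message?.content ?? data.message?.content;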
commit f41c5eccd2 (parent 075f6ae0bc)
2026-04-04 20:22:27 +00:00
3 changed files with 25 additions and 8 deletions

@@ -365,12 +365,21 @@ export const analyzeWithOllama = async (file: File, endpoint: string, model: str
     reader.readAsDataURL(file);
   });
 
-  const url = `${endpoint}/api/chat`;
+  let url = `${endpoint}/api/chat`;
+
+  // Check if using OpenWebUI (has /api in path but not direct Ollama)
+  const useOpenWebUI = endpoint.includes('/api') || endpoint.includes('llm.reifonas.cloud');
+  if (useOpenWebUI) {
+    // Use OpenWebUI API format
+    url = `${endpoint}/api/v1/chat/completions`;
+  }
 
   const response = await fetch(url, {
     method: 'POST',
     headers: {
       'Content-Type': 'application/json',
+      ...(useOpenWebUI ? { 'Authorization': 'Bearer no-key-required' } : {})
     },
     body: JSON.stringify({
       model,
@@ -379,8 +388,8 @@ export const analyzeWithOllama = async (file: File, endpoint: string, model: str
           role: 'user',
           content: [
             {
-              type: 'image',
-              data: base64Data
+              type: 'image_url',
+              image_url: { url: `data:${file.type};base64,${base64Data}` }
             },
             {
               type: 'text',
@@ -389,7 +398,7 @@ export const analyzeWithOllama = async (file: File, endpoint: string, model: str
           ]
         }
       ],
-      format: 'json',
+      ...(useOpenWebUI ? {} : { format: 'json' }),
       options: {
         temperature: 0.1,
         num_predict: 4096
@@ -399,14 +408,17 @@ export const analyzeWithOllama = async (file: File, endpoint: string, model: str
   if (!response.ok) {
     const error = await response.text();
-    throw new Error(`Erro do Ollama: ${error}`);
+    throw new Error(`Erro do Ollama/OpenWebUI: ${error}`);
   }
 
   const data = await response.json();
 
-  const content = data.message?.content;
+  // OpenWebUI format: data.choices[0].message.content
+  // Ollama native format: data.message.content
+  const content = data.choices?.[0]?.message?.content || data.message?.content;
 
   if (!content) {
-    throw new Error("Resposta vazia do Ollama");
+    throw new Error("Resposta vazia do Ollama/OpenWebUI");
   }
 
   return cleanAndParseJson(content) as ReportData;
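
For illustration, the endpoint routing introduced above, isolated as a standalone sketch (resolveChatUrl is a hypothetical helper written for this note, not code from the repository; the example URLs come from the auto-detect list further down):

const resolveChatUrl = (endpoint: string): string => {
  // Same detection rule as the commit: an /api path segment or the known gateway host
  const useOpenWebUI = endpoint.includes('/api') || endpoint.includes('llm.reifonas.cloud');
  return useOpenWebUI
    ? `${endpoint}/api/v1/chat/completions` // OpenWebUI, OpenAI-compatible route
    : `${endpoint}/api/chat`;               // native Ollama route
};

resolveChatUrl('https://llm.reifonas.cloud'); // -> https://llm.reifonas.cloud/api/v1/chat/completions
resolveChatUrl('http://127.0.0.1:11434');     // -> http://127.0.0.1:11434/api/chat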

@@ -66,8 +66,12 @@ const testOllama = async (endpoint?: string): Promise<TestResult> => {
     }
   }
 
+  // Normalize endpoint - add /api for OpenWebUI
+  const useOpenWebUI = ollamaEndpoint.includes('llm.reifonas.cloud');
+  const apiEndpoint = useOpenWebUI ? `${ollamaEndpoint}/api` : ollamaEndpoint;
+
   try {
-    const response = await fetch(`${ollamaEndpoint}/api/tags`);
+    const response = await fetch(`${apiEndpoint}/tags`);
     if (!response.ok) {
       return { success: false, error: 'Não foi possível conectar ao Ollama. Verifique o endereço.' };

@@ -7,6 +7,7 @@ export const OLLAMA_AUTO_DETECT_URLS = [
   'http://127.0.0.1:11434',
   'http://192.168.1.100:11434',
   'http://10.0.0.1:11434',
+  'http://10.0.1.1:11434',
   'https://llm.reifonas.cloud',
   'http://ollama:11434',
   'http://host.docker.internal:11434',