feat: support OpenWebUI as Ollama gateway
- Connect via the OpenWebUI API at https://llm.reifonas.cloud/api
- Use the /api/v1/chat/completions format for OpenWebUI
- Keep the native Ollama format as a fallback
- Auto-detect models from both endpoints
This commit is contained in:
@@ -66,8 +66,12 @@ const testOllama = async (endpoint?: string): Promise<TestResult> => {
|
||||
}
|
||||
}
|
||||
|
||||
// Normalize endpoint - add /api for OpenWebUI
|
||||
const useOpenWebUI = ollamaEndpoint.includes('llm.reifonas.cloud');
|
||||
const apiEndpoint = useOpenWebUI ? `${ollamaEndpoint}/api` : ollamaEndpoint;
|
||||
|
||||
try {
|
||||
const response = await fetch(`${ollamaEndpoint}/api/tags`);
|
||||
const response = await fetch(`${apiEndpoint}/tags`);
|
||||
|
||||
if (!response.ok) {
|
||||
return { success: false, error: 'Não foi possível conectar ao Ollama. Verifique o endereço.' };
|
||||
|
||||
Reference in New Issue
Block a user