feat: Ollama auto-detect without manual input

- Auto-detect Ollama endpoint from predefined URLs
- Try multiple common addresses (localhost, VPS IPs, cloud domain)
- One-click connect to Ollama without manual endpoint entry
- Visual feedback during detection
- Add support for https://llm.reifonas.cloud
This commit is contained in:
2026-04-04 19:51:34 +00:00
parent a395f0d696
commit 075f6ae0bc
3 changed files with 121 additions and 25 deletions

View File

@@ -2,6 +2,16 @@ import { AIProvider } from './providers';
export type AIProvider = 'gemini' | 'openai' | 'anthropic' | 'azure' | 'ollama';
/**
 * Candidate Ollama endpoints probed during auto-detection.
 *
 * Order matters: entries are tried first-to-last, so local endpoints come
 * before remote ones. All entries use Ollama's default port 11434 except
 * the hosted cloud domain.
 */
export const OLLAMA_AUTO_DETECT_URLS = [
  // Local development machine.
  'http://localhost:11434',
  'http://127.0.0.1:11434',
  // NOTE(review): these two LAN/VPS addresses look site-specific guesses
  // (a common home-router subnet and a gateway address) — confirm they are
  // intended for general distribution rather than one deployment.
  'http://192.168.1.100:11434',
  'http://10.0.0.1:11434',
  // Hosted cloud endpoint (TLS).
  'https://llm.reifonas.cloud',
  // Docker: compose service name and the host gateway alias.
  'http://ollama:11434',
  'http://host.docker.internal:11434',
];
export interface ProviderConfig {
id: AIProvider;
name: string;