feat: Ollama auto-detect without manual input
- Auto-detect the Ollama endpoint from a predefined list of URLs
- Try multiple common addresses (localhost, VPS IPs, cloud domain)
- One-click connect to Ollama without manual endpoint entry
- Visual feedback while detection is in progress
- Support for https://llm.reifonas.cloud
This commit is contained in:
@@ -2,6 +2,16 @@ import { AIProvider } from './providers';
|
||||
|
||||
/** Identifier of a supported AI backend provider. 'ollama' enables the auto-detect flow below. */
export type AIProvider = 'gemini' | 'openai' | 'anthropic' | 'azure' | 'ollama';
|
||||
|
||||
export const OLLAMA_AUTO_DETECT_URLS = [
|
||||
'http://localhost:11434',
|
||||
'http://127.0.0.1:11434',
|
||||
'http://192.168.1.100:11434',
|
||||
'http://10.0.0.1:11434',
|
||||
'https://llm.reifonas.cloud',
|
||||
'http://ollama:11434',
|
||||
'http://host.docker.internal:11434',
|
||||
];
|
||||
|
||||
export interface ProviderConfig {
|
||||
id: AIProvider;
|
||||
name: string;
|
||||
|
||||
Reference in New Issue
Block a user