// NOTE(fix): the original line began with `import { AIProvider } from './providers';`
// while also declaring `export type AIProvider` below — a duplicate-identifier
// compile error (TS2440). This module IS the provider-type source of truth, so
// the self-import is removed.

/** Identifier for each supported AI backend. */
export type AIProvider = 'gemini' | 'openai' | 'anthropic' | 'azure' | 'ollama';

/**
 * Candidate base URLs probed when auto-detecting a local/remote Ollama server.
 * Order matters: probed first-to-last, localhost first.
 *
 * NOTE(review): the two LAN addresses (192.168.1.100 / 10.0.0.1) are
 * environment-specific guesses — confirm they are still the intended defaults.
 */
export const OLLAMA_AUTO_DETECT_URLS = [
  'http://localhost:11434',
  'http://127.0.0.1:11434',
  'http://192.168.1.100:11434',
  'http://10.0.0.1:11434',
  'https://llm.reifonas.cloud',
  'http://ollama:11434',
  'http://host.docker.internal:11434',
];

/** Static metadata describing one selectable AI provider. */
export interface ProviderConfig {
  /** Stable identifier used as the discriminant across the app. */
  id: AIProvider;
  /** Human-readable display name. */
  name: string;
  /** Short description shown in the provider picker (Portuguese UI copy). */
  description: string;
  /** Known model identifiers; empty when models are discovered at runtime. */
  models: string[];
  /** True when the provider needs a user-supplied endpoint URL (Azure, Ollama). */
  requiresEndpoint?: boolean;
  /** Model pre-selected when the provider is chosen. */
  defaultModel: string;
}

/** Catalog of all providers offered in the UI, in display order. */
export const PROVIDERS: ProviderConfig[] = [
  {
    id: 'gemini',
    name: 'Google Gemini',
    description: 'Modelos avançados do Google com visão multimodal',
    models: ['gemini-2.0-flash', 'gemini-2.5-flash', 'gemini-2.5-pro'],
    defaultModel: 'gemini-2.5-flash',
  },
  {
    id: 'openai',
    name: 'OpenAI',
    description: 'GPT-4 e modelos de visão da OpenAI',
    models: ['gpt-4o', 'gpt-4o-mini', 'gpt-4-turbo', 'gpt-4-vision-preview'],
    defaultModel: 'gpt-4o',
  },
  {
    id: 'anthropic',
    name: 'Anthropic (Claude)',
    description: 'Claude 3 com análise avançada de documentos',
    models: ['claude-3-opus-20240229', 'claude-3-sonnet-20240229', 'claude-3-haiku-20240307'],
    defaultModel: 'claude-3-sonnet-20240229',
  },
  {
    id: 'azure',
    name: 'Azure OpenAI',
    description: 'OpenAI via Azure com segurança enterprise',
    models: ['gpt-4', 'gpt-4-32k', 'gpt-35-turbo'],
    requiresEndpoint: true,
    defaultModel: 'gpt-4',
  },
  {
    // Models are intentionally empty: the installed model list is fetched from
    // the Ollama server at runtime, so defaultModel is not in `models` here.
    id: 'ollama',
    name: 'Ollama (Local)',
    description: 'LLMs rodando localmente na sua VPS',
    models: [],
    requiresEndpoint: true,
    defaultModel: 'llama3.2-vision',
  },
];