Files
SteelCheck/types/providers.ts
admtracksteel a395f0d696 feat: add Ollama local provider support
- Added Ollama (local) as AI provider option
- Configure VPS endpoint for Ollama connection
- Auto-detect available models from Ollama server
- Support for vision-capable models (llama3.2-vision, etc)
2026-04-04 19:46:14 +00:00

52 lines
1.4 KiB
TypeScript

// NOTE(fix): removed `import { AIProvider } from './providers';` — this file IS
// providers.ts, so the import was a circular self-import that collided with the
// local declaration below (duplicate identifier). The type is declared here and
// exported for other modules to import.

/** Identifier for each AI backend SteelCheck can route analysis requests to. */
export type AIProvider = 'gemini' | 'openai' | 'anthropic' | 'azure' | 'ollama';
/**
 * Static metadata describing one selectable AI provider, as rendered in the
 * provider picker and used to drive provider-specific configuration.
 */
export interface ProviderConfig {
  /** Stable machine identifier; one of the {@link AIProvider} literals. */
  id: AIProvider;
  /** Human-readable display name (e.g. "Google Gemini"). */
  name: string;
  /** Short UI description (Portuguese in this codebase). */
  description: string;
  /**
   * Model identifiers offered by this provider. May be empty — presumably the
   * list is then discovered at runtime (the Ollama entry ships empty and the
   * commit note mentions auto-detecting models from the server); TODO confirm.
   */
  models: string[];
  /** True when the user must supply a custom endpoint URL (Azure, Ollama). */
  requiresEndpoint?: boolean;
  /** Model preselected when the provider is first chosen. */
  defaultModel: string;
}
/**
 * Registry of every AI provider the app can use, with its static configuration.
 *
 * Uses `satisfies ProviderConfig[]` instead of a type annotation: each entry is
 * still validated against {@link ProviderConfig} (typos / missing fields are
 * caught), but the literal types of `id` and the model names are preserved for
 * downstream narrowing. The inferred type remains assignable to
 * `ProviderConfig[]`, so existing callers are unaffected.
 */
export const PROVIDERS = [
  {
    id: 'gemini',
    name: 'Google Gemini',
    description: 'Modelos avançados do Google com visão multimodal',
    models: ['gemini-2.0-flash', 'gemini-2.5-flash', 'gemini-2.5-pro'],
    defaultModel: 'gemini-2.5-flash',
  },
  {
    id: 'openai',
    name: 'OpenAI',
    description: 'GPT-4 e modelos de visão da OpenAI',
    models: ['gpt-4o', 'gpt-4o-mini', 'gpt-4-turbo', 'gpt-4-vision-preview'],
    defaultModel: 'gpt-4o',
  },
  {
    id: 'anthropic',
    name: 'Anthropic (Claude)',
    description: 'Claude 3 com análise avançada de documentos',
    models: ['claude-3-opus-20240229', 'claude-3-sonnet-20240229', 'claude-3-haiku-20240307'],
    defaultModel: 'claude-3-sonnet-20240229',
  },
  {
    id: 'azure',
    // Azure requires a per-deployment endpoint URL, hence requiresEndpoint.
    name: 'Azure OpenAI',
    description: 'OpenAI via Azure com segurança enterprise',
    models: ['gpt-4', 'gpt-4-32k', 'gpt-35-turbo'],
    requiresEndpoint: true,
    defaultModel: 'gpt-4',
  },
  {
    id: 'ollama',
    name: 'Ollama (Local)',
    description: 'LLMs rodando localmente na sua VPS',
    // Intentionally empty — presumably populated at runtime by auto-detecting
    // models from the Ollama server (per the commit note); TODO confirm. Note
    // defaultModel is therefore not guaranteed to appear in `models`.
    models: [],
    requiresEndpoint: true,
    defaultModel: 'llama3.2-vision',
  },
] satisfies ProviderConfig[];