feat: add Ollama local provider support

- Added Ollama (local) as AI provider option
- Configure VPS endpoint for Ollama connection
- Auto-detect available models from Ollama server
- Support for vision-capable models (llama3.2-vision, etc.)
This commit is contained in:
2026-04-04 19:46:14 +00:00
parent 97eb42c243
commit a395f0d696
5 changed files with 213 additions and 77 deletions

27
App.tsx
View File

@@ -14,6 +14,7 @@ import { ApiKeySetup } from './components/ApiKeySetup';
const App: React.FC = () => {
const [file, setFile] = useState<File | null>(null);
const [apiKey, setApiKey] = useState<string>('');
const [endpoint, setEndpoint] = useState<string>('');
const [provider, setProvider] = useState<AIProvider>('gemini');
const [model, setModel] = useState<string>('gemini-2.5-flash');
const [hasKey, setHasKey] = useState<boolean>(false);
@@ -25,11 +26,13 @@ const App: React.FC = () => {
const savedApiKey = localStorage.getItem('api-key');
const savedProvider = localStorage.getItem('ai-provider') as AIProvider;
const savedModel = localStorage.getItem('model-' + savedProvider);
const savedEndpoint = localStorage.getItem('ollama-endpoint');
if (savedApiKey) {
setApiKey(savedApiKey);
if (savedApiKey || savedEndpoint) {
setApiKey(savedApiKey || '');
if (savedProvider) setProvider(savedProvider);
if (savedModel) setModel(savedModel);
if (savedEndpoint) setEndpoint(savedEndpoint);
setHasKey(true);
}
}, []);
@@ -42,24 +45,33 @@ const App: React.FC = () => {
}
}, []);
const handleKeySave = useCallback((key: string, newProvider: AIProvider, newModel: string) => {
if (key) {
const handleKeySave = useCallback((key: string, newProvider: AIProvider, newModel: string, newEndpoint?: string) => {
setApiKey(key);
setProvider(newProvider);
setModel(newModel);
if (newEndpoint) {
setEndpoint(newEndpoint);
localStorage.setItem('ollama-endpoint', newEndpoint);
}
if (key) {
localStorage.setItem('api-key', key);
}
localStorage.setItem('ai-provider', newProvider);
localStorage.setItem('model-' + newProvider, newModel);
setHasKey(true);
}
}, []);
const handleAnalyzeClick = async () => {
if (!apiKey) {
if (provider !== 'ollama' && !apiKey) {
setError("A chave de API não foi encontrada. Por favor, configure-a novamente.");
setHasKey(false);
return;
}
if (provider === 'ollama' && !endpoint) {
setError("O endereço do Ollama não foi configurado. Por favor, configure-o.");
setHasKey(false);
return;
}
if (!file) {
setError("Por favor, selecione um arquivo primeiro.");
return;
@@ -72,7 +84,8 @@ const App: React.FC = () => {
provider,
apiKey,
model,
file
file,
endpoint: provider === 'ollama' ? endpoint : undefined
});
setReportData(data);
} catch (err) {

View File

@@ -4,7 +4,7 @@ import { PROVIDERS, type AIProvider } from '../types/providers';
import { testApiKey, type ModelInfo } from '../services/apiTestService';
interface ApiKeySetupProps {
onKeySave: (key: string, provider: AIProvider, model: string) => void;
onKeySave: (key: string, provider: AIProvider, model: string, endpoint?: string) => void;
}
const isValidApiKey = (key: string): boolean => {
@@ -63,6 +63,7 @@ export const ApiKeySetup: React.FC<ApiKeySetupProps> = ({ onKeySave }) => {
case 'openai': return 'https://platform.openai.com/api-keys';
case 'anthropic': return 'https://console.anthropic.com/keys';
case 'azure': return 'https://portal.azure.com/#view/Microsoft_AAD_IAM/ActiveDirectoryMenuBlade/RegisteredApps';
case 'ollama': return 'https://ollama.com/download';
default: return '#';
}
};
@@ -73,6 +74,7 @@ export const ApiKeySetup: React.FC<ApiKeySetupProps> = ({ onKeySave }) => {
case 'openai': return 'OpenAI';
case 'anthropic': return 'Anthropic (Claude)';
case 'azure': return 'Azure OpenAI';
case 'ollama': return 'Ollama (Local)';
default: return 'API';
}
};
@@ -115,7 +117,11 @@ export const ApiKeySetup: React.FC<ApiKeySetupProps> = ({ onKeySave }) => {
};
const handleSave = () => {
if (localApiKey.trim() && isValidApiKey(localApiKey)) {
if (provider === 'ollama') {
if (endpoint.trim()) {
onKeySave('', provider, model, endpoint.trim());
}
} else if (localApiKey.trim() && isValidApiKey(localApiKey)) {
onKeySave(localApiKey.trim(), provider, model);
}
};
@@ -157,24 +163,30 @@ export const ApiKeySetup: React.FC<ApiKeySetupProps> = ({ onKeySave }) => {
</div>
</div>
{/* Azure Endpoint (only show if Azure is selected) */}
{provider === 'azure' && (
{/* Azure/Ollama Endpoint */}
{(provider === 'azure' || provider === 'ollama') && (
<div>
<label htmlFor="azure-endpoint" className="block text-sm font-medium text-slate-700 dark:text-slate-300 mb-2">
Endpoint do Azure
<label htmlFor="provider-endpoint" className="block text-sm font-medium text-slate-700 dark:text-slate-300 mb-2">
{provider === 'ollama' ? 'Endereço do Ollama (VPS)' : 'Endpoint do Azure'}
</label>
<input
type="url"
id="azure-endpoint"
id="provider-endpoint"
className="block w-full rounded-xl border-slate-300 dark:border-slate-600 bg-white/50 dark:bg-slate-700/50 py-3 px-4 text-slate-900 dark:text-slate-100 shadow-sm focus:border-blue-500 focus:ring-blue-500 sm:text-sm"
placeholder="https://seu-resource.openai.azure.com"
placeholder={provider === 'ollama' ? 'http://192.168.1.100:11434' : 'https://seu-resource.openai.azure.com'}
value={endpoint}
onChange={(e) => setEndpoint(e.target.value)}
/>
{provider === 'ollama' && (
<p className="mt-1 text-xs text-slate-500">
Informe o IP público da sua VPS e a porta (padrão: 11434)
</p>
)}
</div>
)}
{/* API Key Input */}
{/* API Key Input (not needed for Ollama) */}
{provider !== 'ollama' && (
<div>
<label htmlFor="api-key-setup" className="block text-sm font-medium text-slate-700 dark:text-slate-300 mb-2">
Chave de API ({getProviderLabel(provider)})
@@ -230,6 +242,7 @@ export const ApiKeySetup: React.FC<ApiKeySetupProps> = ({ onKeySave }) => {
Não tem uma chave? <a href={getProviderLink(provider)} target="_blank" rel="noopener noreferrer" className="font-semibold text-blue-600 hover:text-blue-700 dark:text-blue-400 hover:underline">Obtenha aqui</a>.
</p>
</div>
)}
{/* Model Selector */}
<div>
@@ -268,7 +281,7 @@ export const ApiKeySetup: React.FC<ApiKeySetupProps> = ({ onKeySave }) => {
<button
onClick={handleSave}
disabled={!localApiKey.trim() || isValid === false}
disabled={provider !== 'ollama' && (!localApiKey.trim() || isValid === false)}
className="w-full flex justify-center items-center gap-2 bg-gradient-to-r from-blue-600 to-indigo-600 hover:from-blue-700 hover:to-indigo-700 text-white font-bold py-3.5 px-4 rounded-xl shadow-lg hover:shadow-blue-500/30 focus:outline-none focus:ring-2 focus:ring-offset-2 focus:ring-blue-500 disabled:opacity-50 disabled:cursor-not-allowed transition-all duration-300 transform active:scale-[0.98]"
>
<SaveIcon className="h-5 w-5" />

View File

@@ -347,7 +347,67 @@ export const analyzeCertificate = async (options: AnalyzeOptions): Promise<Repor
return analyzeWithAnthropic(file, apiKey, model);
case 'azure':
return analyzeWithAzure(file, apiKey, endpoint!, model);
case 'ollama':
return analyzeWithOllama(file, endpoint!, model);
default:
throw new Error(`Provedor não suportado: ${provider}`);
}
};
/**
 * Analyzes a certificate image/file using a self-hosted Ollama server.
 *
 * Ollama exposes a local REST API and requires no API key — only the
 * endpoint (e.g. http://192.168.1.100:11434) configured by the user.
 *
 * @param file     The file to analyze (read as base64 and sent as an image).
 * @param endpoint Base URL of the Ollama server (no trailing path).
 * @param model    Ollama model name; defaults to a vision-capable model.
 * @returns Parsed ReportData extracted from the model's JSON answer.
 * @throws If the endpoint is missing, the file cannot be read, the HTTP
 *         request fails, or the model returns an empty response.
 */
export const analyzeWithOllama = async (file: File, endpoint: string, model: string = 'llama3.2-vision'): Promise<ReportData> => {
  if (!endpoint) {
    throw new Error("O endpoint do Ollama é necessário. Configure o endereço da sua VPS.");
  }
  // Read the file as a data URL and strip the "data:<mime>;base64," prefix.
  // Reject on read failure — otherwise onloadend resolves with a null result
  // and the promise would crash on .split (or hang the UI in an error state).
  const base64Data = await new Promise<string>((resolve, reject) => {
    const reader = new FileReader();
    reader.onload = () => resolve((reader.result as string).split(',')[1]);
    reader.onerror = () => reject(new Error("Falha ao ler o arquivo selecionado."));
    reader.readAsDataURL(file);
  });
  // Normalize a trailing slash so we never request "…//api/chat".
  const url = `${endpoint.replace(/\/+$/, '')}/api/chat`;
  const response = await fetch(url, {
    method: 'POST',
    headers: {
      'Content-Type': 'application/json',
    },
    body: JSON.stringify({
      model,
      // Ollama's native chat format: `content` is a plain string and images
      // are passed as raw base64 strings in the separate `images` array
      // (NOT the OpenAI-style array of {type, ...} content parts).
      messages: [
        {
          role: 'user',
          content: PROMPT_BASE + "\n\nRetorne apenas JSON válido sem formatação markdown.",
          images: [base64Data]
        }
      ],
      // Without stream:false the endpoint streams NDJSON chunks and
      // response.json() below would fail to parse the body.
      stream: false,
      format: 'json',
      options: {
        temperature: 0.1, // low temperature: deterministic data extraction
        num_predict: 4096
      }
    })
  });
  if (!response.ok) {
    const error = await response.text();
    throw new Error(`Erro do Ollama: ${error}`);
  }
  const data = await response.json();
  const content = data.message?.content;
  if (!content) {
    throw new Error("Resposta vazia do Ollama");
  }
  return cleanAndParseJson(content) as ReportData;
};

View File

@@ -22,6 +22,8 @@ export const testApiKey = async (provider: AIProvider, apiKey: string, endpoint?
return await testAnthropic(apiKey);
case 'azure':
return await testAzure(apiKey, endpoint);
case 'ollama':
return await testOllama(endpoint);
default:
return { success: false, error: 'Provedor não suportado' };
}
@@ -33,6 +35,44 @@ export const testApiKey = async (provider: AIProvider, apiKey: string, endpoint?
}
};
/**
 * Verifies connectivity to a self-hosted Ollama server.
 *
 * No API key is involved: the check simply lists installed models via
 * GET /api/tags. Vision-capable models are preferred in the returned list;
 * when none match, the full list (or a sensible default) is returned so the
 * user can still pick something.
 *
 * @param endpoint Base URL of the Ollama server (e.g. http://host:11434).
 * @returns TestResult with success flag and available models, or an error.
 */
const testOllama = async (endpoint?: string): Promise<TestResult> => {
  if (!endpoint) {
    return { success: false, error: 'Endereço do Ollama é obrigatório (ex: http://192.168.1.100:11434)' };
  }
  try {
    // Normalize a trailing slash so we never request "…//api/tags".
    const response = await fetch(`${endpoint.replace(/\/+$/, '')}/api/tags`);
    if (!response.ok) {
      return { success: false, error: 'Não foi possível conectar ao Ollama. Verifique o endereço.' };
    }
    const data = await response.json();
    // /api/tags responds with { models: [{ name: string, ... }, ...] }.
    const models: ModelInfo[] = (data.models ?? []).map((m: { name: string }) => ({
      id: m.name,
      name: m.name
    }));
    // Heuristic filter for models likely to accept image input.
    const visionModels = models.filter((m) =>
      m.id.includes('vision') ||
      m.id.includes('llama3') ||
      m.id.includes('qwen2') ||
      m.id.includes('moondream')
    );
    if (visionModels.length > 0) {
      return { success: true, models: visionModels };
    }
    return {
      success: true,
      models: models.length > 0 ? models : [{ id: 'llama3.2', name: 'Llama 3.2 (Padrão)' }]
    };
  } catch {
    return { success: false, error: 'Não foi possível conectar ao Ollama. Verifique o endereço e certifique-se que o Ollama está rodando.' };
  }
};
const testGemini = async (apiKey: string): Promise<TestResult> => {
const { GoogleGenAI } = await import('@google/genai');

View File

@@ -1,4 +1,6 @@
export type AIProvider = 'gemini' | 'openai' | 'anthropic' | 'azure';
import { AIProvider } from './providers';
export type AIProvider = 'gemini' | 'openai' | 'anthropic' | 'azure' | 'ollama';
export interface ProviderConfig {
id: AIProvider;
@@ -38,5 +40,13 @@ export const PROVIDERS: ProviderConfig[] = [
models: ['gpt-4', 'gpt-4-32k', 'gpt-35-turbo'],
requiresEndpoint: true,
defaultModel: 'gpt-4'
},
{
id: 'ollama',
name: 'Ollama (Local)',
description: 'LLMs rodando localmente na sua VPS',
models: [],
requiresEndpoint: true,
defaultModel: 'llama3.2-vision'
}
];