feat: add Ollama local provider support

- Add Ollama (local) as an AI provider option
- Configure VPS endpoint for Ollama connection
- Auto-detect available models from Ollama server
- Support for vision-capable models (llama3.2-vision, etc.)
This commit is contained in:
2026-04-04 19:46:14 +00:00
parent 97eb42c243
commit a395f0d696
5 changed files with 213 additions and 77 deletions

39
App.tsx
View File

@@ -14,6 +14,7 @@ import { ApiKeySetup } from './components/ApiKeySetup';
const App: React.FC = () => {
const [file, setFile] = useState<File | null>(null);
const [apiKey, setApiKey] = useState<string>('');
const [endpoint, setEndpoint] = useState<string>('');
const [provider, setProvider] = useState<AIProvider>('gemini');
const [model, setModel] = useState<string>('gemini-2.5-flash');
const [hasKey, setHasKey] = useState<boolean>(false);
@@ -25,11 +26,13 @@ const App: React.FC = () => {
const savedApiKey = localStorage.getItem('api-key');
const savedProvider = localStorage.getItem('ai-provider') as AIProvider;
const savedModel = localStorage.getItem('model-' + savedProvider);
const savedEndpoint = localStorage.getItem('ollama-endpoint');
if (savedApiKey) {
setApiKey(savedApiKey);
if (savedApiKey || savedEndpoint) {
setApiKey(savedApiKey || '');
if (savedProvider) setProvider(savedProvider);
if (savedModel) setModel(savedModel);
if (savedEndpoint) setEndpoint(savedEndpoint);
setHasKey(true);
}
}, []);
@@ -42,24 +45,33 @@ const App: React.FC = () => {
}
}, []);
const handleKeySave = useCallback((key: string, newProvider: AIProvider, newModel: string) => {
if (key) {
setApiKey(key);
setProvider(newProvider);
setModel(newModel);
localStorage.setItem('api-key', key);
localStorage.setItem('ai-provider', newProvider);
localStorage.setItem('model-' + newProvider, newModel);
setHasKey(true);
// Persists the user's provider configuration (API key, provider, model, and —
// for Ollama — the server endpoint) to both component state and localStorage,
// then marks setup as complete via setHasKey(true).
// newEndpoint is optional: presumably only supplied when provider is 'ollama'
// — TODO confirm against ApiKeySetup's onSave call site.
const handleKeySave = useCallback((key: string, newProvider: AIProvider, newModel: string, newEndpoint?: string) => {
setApiKey(key);
setProvider(newProvider);
setModel(newModel);
// Endpoint is only stored when provided; a falsy/empty endpoint leaves any
// previously saved 'ollama-endpoint' value in localStorage untouched.
if (newEndpoint) {
setEndpoint(newEndpoint);
localStorage.setItem('ollama-endpoint', newEndpoint);
}
// Key may be empty for keyless providers (e.g. local Ollama); only persist
// a non-empty key so an existing saved key is not clobbered with ''.
if (key) {
localStorage.setItem('api-key', key);
}
localStorage.setItem('ai-provider', newProvider);
// Model preference is stored per provider ('model-<provider>') so switching
// providers restores each one's last-used model.
localStorage.setItem('model-' + newProvider, newModel);
setHasKey(true);
}, []);
const handleAnalyzeClick = async () => {
if (!apiKey) {
if (provider !== 'ollama' && !apiKey) {
setError("A chave de API não foi encontrada. Por favor, configure-a novamente.");
setHasKey(false);
return;
}
if (provider === 'ollama' && !endpoint) {
setError("O endereço do Ollama não foi configurado. Por favor, configure-o.");
setHasKey(false);
return;
}
if (!file) {
setError("Por favor, selecione um arquivo primeiro.");
return;
@@ -72,7 +84,8 @@ const App: React.FC = () => {
provider,
apiKey,
model,
file
file,
endpoint: provider === 'ollama' ? endpoint : undefined
});
setReportData(data);
} catch (err) {