Change default Ollama model from qwen2.5-coder to llama3.2:1b for faster chat
This commit is contained in:
@@ -39,7 +39,7 @@ LLM_PROVIDERS = {
         "type": "local",
         "endpoint": os.getenv("OLLAMA_HOST", "http://ollama:11434"),
         "models": None,
-        "default": "qwen2.5-coder:1.5b"
+        "default": "llama3.2:1b"
     }
 }
@@ -64,7 +64,7 @@ def get_config() -> dict:
     return {
         "orchestrator": {
             "planner": {"provider": "gemini", "model": "gemini-2.5-flash"},
-            "executor": {"provider": "ollama", "model": "qwen2.5-coder:1.5b"}
+            "executor": {"provider": "ollama", "model": "llama3.2:1b"}
         },
         "api_keys": {
             "openai": "",
@@ -85,7 +85,7 @@ def get_orchestrator_config() -> dict:
     cfg = get_config()
     return cfg.get("orchestrator", {
         "planner": {"provider": "gemini", "model": "gemini-2.5-flash"},
-        "executor": {"provider": "ollama", "model": "qwen2.5-coder:1.5b"}
+        "executor": {"provider": "ollama", "model": "llama3.2:1b"}
     })

 def set_planner(provider: str = None, model: str = None) -> dict:
@@ -116,7 +116,7 @@ def set_executor(provider: str = None, model: str = None) -> dict:
         }
         save_config(cfg)

-        return cfg["orchestrator"].get("executor", {"provider": "ollama", "model": "qwen2.5-coder:1.5b"})
+        return cfg["orchestrator"].get("executor", {"provider": "ollama", "model": "llama3.2:1b"})
     return cfg["orchestrator"]["executor"]

 def set_api_key(provider: str, key: str):
@@ -370,7 +370,7 @@ def get_planner_llm() -> tuple:
 def get_executor_llm() -> tuple:
     """Retorna provider e modelo do executor configurado."""
     cfg = get_orchestrator_config()
-    executor = cfg.get("executor", {"provider": "ollama", "model": "qwen2.5-coder:1.5b"})
+    executor = cfg.get("executor", {"provider": "ollama", "model": "llama3.2:1b"})
     return executor["provider"], executor["model"]

 def call_planner(prompt: str, system_prompt: str = None) -> str:
||||
Reference in New Issue
Block a user