diff --git a/docker-images/Dockerfile.agent b/docker-images/Dockerfile.agent
index 51b89c9..74762eb 100644
--- a/docker-images/Dockerfile.agent
+++ b/docker-images/Dockerfile.agent
@@ -61,20 +61,19 @@ RUN bunx oh-my-opencode@latest install --no-tui \
 RUN mkdir -p /root/.config/opencode && \
     cat < /root/.config/opencode/opencode.json
 {
-  "plugin": ["oh-my-opencode"],
-  "theme": "oh-my-opencode",
-  "model": "halo/qwen3",
-  "small_model": "halo/gemma3",
-  "default_agent": "Sisyphus",
+  "plugin": [],
+  "theme": "system",
+  "model": "halo/qwen3-coder-next:latest",
+  "small_model": "halo/gemma3:4b",
   "provider": {
     "halo": {
       "npm": "@ai-sdk/openai-compatible",
       "name": "local models",
       "options": { "baseURL": "http://host.docker.internal:11434/v1" },
-      "models": { "glm47": { "name": "hf.co/unsloth/GLM-4.7-Flash-GGUF:Q6_K" },
-                  "lfm2": { "name": "hf.co/LiquidAI/LFM2-24B-A2B-GGUF:Q8_0" },
-                  "qwen3": { "name": "qwen3-coder-next:latest" },
-                  "gemma3": {"name": "gemma3:4b"}
+      "models": { "hf.co/unsloth/GLM-4.7-Flash-GGUF:Q6_K": { "name": "glm4.7" },
+                  "hf.co/LiquidAI/LFM2-24B-A2B-GGUF:Q8_0": { "name": "lfm2" },
+                  "qwen3-coder-next:latest": { "name": "qwen3" },
+                  "gemma3:4b": {"name": "Gemma"}
       }
     }