mod: volume for config

This commit is contained in:
Andy Bunce 2026-03-17 15:32:37 +00:00
parent e5795f9c6a
commit 9b962ad4a4
3 changed files with 14 additions and 44 deletions

View file

@@ -28,9 +28,10 @@ ENV PATH="$BUN_INSTALL/bin:$PATH"
RUN curl -fsSL https://bun.sh/install | bash RUN curl -fsSL https://bun.sh/install | bash
# use bun for node, npx # use bun for node, npx
RUN cd /usr/local/bin && \ RUN pushd /usr/local/bin && \
echo '#!/bin/sh' > npx && echo 'exec bunx "$@"' >> npx && chmod +x npx && \ echo '#!/bin/sh' > npx && echo 'exec bunx "$@"' >> npx && chmod +x npx && \
echo '#!/bin/sh' > node && echo 'exec bun "$@"' >> node && chmod +x node echo '#!/bin/sh' > node && echo 'exec bun "$@"' >> node && chmod +x node && \
popd
# --- NEW SECTION: RUST SETUP --- # --- NEW SECTION: RUST SETUP ---
@@ -73,49 +74,7 @@ ENV PATH="/root/.opencode/bin:$PATH"
# 9. CONFIG PART 1: Hardware/Providers (opencode.json) # 9. CONFIG PART 1: Hardware/Providers (opencode.json)
# Maps your local ports to providers. # Maps your local ports to providers.
RUN mkdir -p /root/.config/opencode && \
cat <<EOF > /root/.config/opencode/opencode.json
{
"plugin": [],
"theme": "system",
"model": "halo/qwen3-coder-next:latest",
"small_model": "halo/gemma3:4b",
"provider": {
"halo": {
"npm": "@ai-sdk/openai-compatible",
"name": "(local)",
"options": { "baseURL": "http://host.docker.internal:11434/v1" },
"models": { "hf.co/unsloth/GLM-4.7-Flash-GGUF:Q6_K": { "name": "glm4.7" },
"hf.co/LiquidAI/LFM2-24B-A2B-GGUF:Q8_0": { "name": "lfm2" },
"qwen3-coder-next:latest": { "name": "qwen3" },
"gemma3:4b": {"name": "Gemma"}
}
}
},
"server": {
"port": 4096,
"hostname": "0.0.0.0",
"mdns": true,
"cors": ["https://example.com"]
}
}
EOF
# 10. CONFIG PART 2: Agent Brains (oh-my-opencode.json)
# Maps Agent Roles to Providers.
RUN cat <<EOF > /root/.config/opencode/oh-my-opencode.json
{
"agents": {
"Sisyphus": { "model": "halo/glm47" },
"Oracle": { "model": "halo/lfm2" },
"Librarian": { "model": "halo/glm47" },
"Hephaestus": { "model": "halo/qwen3" }
},
"disabled_agents": ["multimodal-looker"],
"confirm_dangerous_actions": false
}
EOF
#USER 1000:1000 #USER 1000:1000
WORKDIR /workspace WORKDIR /workspace

View file

@@ -0,0 +1,10 @@
{
"agents": {
"Sisyphus": { "model": "halo/glm47" },
"Oracle": { "model": "halo/lfm2" },
"Librarian": { "model": "halo/glm47" },
"Hephaestus": { "model": "halo/qwen3" }
},
"disabled_agents": ["multimodal-looker"],
"confirm_dangerous_actions": false
}

View file

@@ -14,6 +14,7 @@ function agent() {
--name "$CONTAINER_NAME" \ --name "$CONTAINER_NAME" \
--add-host=host.docker.internal:host-gateway \ --add-host=host.docker.internal:host-gateway \
-v "$TARGET_DIR:/workspace" \ -v "$TARGET_DIR:/workspace" \
-v "$HOME/dev/ai/opencode.cfg:/root/.config/opencode" \
-v "$HOME/dev/ai/docker-images/starship.toml:/root/.config/starship.toml" \ -v "$HOME/dev/ai/docker-images/starship.toml:/root/.config/starship.toml" \
-v opencode-cargo-cache:/root/.cargo \ -v opencode-cargo-cache:/root/.cargo \
-v opencode-go-cache:/root/go \ -v opencode-go-cache:/root/go \