From 0c07e740668bf2dce56f2c11688ed3d50acec434 Mon Sep 17 00:00:00 2001
From: Andy Bunce
Date: Fri, 20 Mar 2026 22:45:57 +0000
Subject: [PATCH] add: glm4.7

---
NOTE(review): fixes applied to this patch: removed a trailing comma after
the glm4.7 "options" object, and re-closed the LFM2 model entry (its
closing brace was deleted in hunk 1 and never restored), so the qwen3
and gemma3 entries remain siblings under "models" instead of being
nested inside the LFM2 entry. Hunk headers and diffstat recomputed.

 opencode.cfg/opencode.json | 117 ++++++++++++++++++++++++-------------
 1 file changed, 69 insertions(+), 48 deletions(-)

diff --git a/opencode.cfg/opencode.json b/opencode.cfg/opencode.json
index 1e9c1b6..231919c 100644
--- a/opencode.cfg/opencode.json
+++ b/opencode.cfg/opencode.json
@@ -12,16 +12,23 @@
     },
     "models": {
       "hf.co/unsloth/GLM-4.7-Flash-GGUF:Q6_K": {
-        "name": "glm4.7"
+        "name": "glm4.7",
+        "family": "glm",
+        "reasoning": true,
+        "tool_call": true,
+        "limit": {
+          "context": 131072, // Context window size
+          "output": 40000 // Output token limit
+        },
+        "options": {
+          "num_ctx": 16000, // Context window size for Ollama
+          "temperature": 0.7,
+          "top_p": 1.0,
+          "max_tokens": 20000 // Max response length
+        }
       },
       "hf.co/LiquidAI/LFM2-24B-A2B-GGUF:Q8_0": {
-        "name": "lfm2"
-      },
-      "qwen3-coder-next:latest": {
-        "name": "qwen3",
-        "family": "qwen3next",
-        "reasoning": false,
-        "tool_call": true,
+        "name": "lfm2",
         "modalities": {
           "input": [
             "text"
@@ -30,45 +37,59 @@
             "text"
           ]
-        },
+        }
+      },
+      "qwen3-coder-next:latest": {
+        "name": "qwen3",
+        "family": "qwen3next",
+        "reasoning": false,
+        "tool_call": true,
+        "limit": {
+          "context": 128000, // Context window size
+          "output": 8192 // Output token limit
+        },
+        "options": {
+          "num_ctx": 16000, // Context window size for Ollama
+          "temperature": 1.0,
+          "top_p": 0.95,
+          "top_k": 40,
+          "max_tokens": 8192 // Max response length
+        },
+        "modalities": {
+          "input": [
+            "text"
+          ],
+          "output": [
+            "text"
+          ]
+        }
+      },
+      "gemma3:4b": {
+        "name": "Gemma 3:4b",
+        "family": "gemma",
+        "temperature": false,
+        "reasoning": false,
+        "attachment": true,
+        "tool_call": false,
+        "modalities": {
+          "input": [
+            "text",
+            "image"
+          ], // Explicitly declare image support
+          "output": [
+            "text"
+          ]
+        },
+        "limit": {
+          "context": 100000, // Maximum context window (input + output)
+          "output": 8192 // Maximum output tokens
+        },
+        "options": {
+          "num_ctx": 16000 // Context window size for Ollama
+        },
+        "headers": {},
+        "release_date": "13 March 2025",
+        "variants": {}
+      }
-        "limit": {
-          "context": 128000, // Context window size
-          "output": 8192 // Output token limit
-        },
-        "options": {
-          "num_ctx": 16000, // Context window size for Ollama
-          "temperature": 1.0,
-          "top_p": 0.95,
-          "top_k": 40,
-          "max_tokens": 8192 // Max response length
-        },
-        "gemma3:4b": {
-          "name": "Gemma 3:4b",
-          "family": "gemma",
-          "temperature": false,
-          "reasoning": false,
-          "attachment": true,
-          "tool_call": false,
-          "modalities": {
-            "input": [
-              "text",
-              "image"
-            ], // Explicitly declare image support
-            "output": [
-              "text"
-            ]
-          },
-          "limit": {
-            "context": 100000, // Maximum context window (input + output)
-            "output": 8192 // Maximum output tokens
-          },
-          "options": {
-            "num_ctx": 16000 // Context window size for Ollama
-          },
-          "headers": {},
-          "release_date": "13 March 2025",
-          "variants": {}
-        }
-      }
     }
   },
   "server": {