refactor: replace opencode/glm-4.7-free with opencode/big-pickle model

Ultraworked with [Sisyphus](https://github.com/code-yeongyu/oh-my-opencode)

Co-authored-by: Sisyphus <clio-agent@sisyphuslabs.ai>
This commit is contained in:
YeonGyu-Kim 2026-02-18 18:07:16 +09:00
parent 096db59399
commit 6bf365595f
14 changed files with 87 additions and 87 deletions

View File

@@ -13,7 +13,7 @@ Agent factories following `createXXXAgent(model) → AgentConfig` pattern. Each
| **Sisyphus** | claude-opus-4-6 | 0.1 | primary | kimi-k2.5 → glm-4.7 → gemini-3-pro | Main orchestrator, plans + delegates | | **Sisyphus** | claude-opus-4-6 | 0.1 | primary | kimi-k2.5 → glm-4.7 → gemini-3-pro | Main orchestrator, plans + delegates |
| **Hephaestus** | gpt-5.3-codex | 0.1 | primary | NONE (required) | Autonomous deep worker | | **Hephaestus** | gpt-5.3-codex | 0.1 | primary | NONE (required) | Autonomous deep worker |
| **Oracle** | gpt-5.2 | 0.1 | subagent | claude-opus-4-6 → gemini-3-pro | Read-only consultation | | **Oracle** | gpt-5.2 | 0.1 | subagent | claude-opus-4-6 → gemini-3-pro | Read-only consultation |
| **Librarian** | glm-4.7 | 0.1 | subagent | glm-4.7-free → claude-sonnet-4-6 | External docs/code search | | **Librarian** | glm-4.7 | 0.1 | subagent | big-pickle → claude-sonnet-4-6 | External docs/code search |
| **Explore** | grok-code-fast-1 | 0.1 | subagent | claude-haiku-4-5 → gpt-5-nano | Contextual grep | | **Explore** | grok-code-fast-1 | 0.1 | subagent | claude-haiku-4-5 → gpt-5-nano | Contextual grep |
| **Multimodal-Looker** | gemini-3-flash | 0.1 | subagent | gpt-5.2 → glm-4.6v → ... (6 deep) | PDF/image analysis | | **Multimodal-Looker** | gemini-3-flash | 0.1 | subagent | gpt-5.2 → glm-4.6v → ... (6 deep) | PDF/image analysis |
| **Metis** | claude-opus-4-6 | **0.3** | subagent | kimi-k2.5 → gpt-5.2 → gemini-3-pro | Pre-planning consultant | | **Metis** | claude-opus-4-6 | **0.3** | subagent | kimi-k2.5 → gpt-5.2 → gemini-3-pro | Pre-planning consultant |

View File

@@ -19,7 +19,7 @@ describe("createBuiltinAgents with model overrides", () => {
"kimi-for-coding/k2p5", "kimi-for-coding/k2p5",
"opencode/kimi-k2.5-free", "opencode/kimi-k2.5-free",
"zai-coding-plan/glm-4.7", "zai-coding-plan/glm-4.7",
"opencode/glm-4.7-free", "opencode/big-pickle",
]) ])
) )
@@ -260,7 +260,7 @@ describe("createBuiltinAgents with model overrides", () => {
"kimi-for-coding/k2p5", "kimi-for-coding/k2p5",
"opencode/kimi-k2.5-free", "opencode/kimi-k2.5-free",
"zai-coding-plan/glm-4.7", "zai-coding-plan/glm-4.7",
"opencode/glm-4.7-free", "opencode/big-pickle",
"openai/gpt-5.2", "openai/gpt-5.2",
]) ])
) )
@@ -506,7 +506,7 @@ describe("createBuiltinAgents without systemDefaultModel", () => {
"kimi-for-coding/k2p5", "kimi-for-coding/k2p5",
"opencode/kimi-k2.5-free", "opencode/kimi-k2.5-free",
"zai-coding-plan/glm-4.7", "zai-coding-plan/glm-4.7",
"opencode/glm-4.7-free", "opencode/big-pickle",
]) ])
) )

View File

@@ -51,7 +51,7 @@ cli/
## MODEL FALLBACK SYSTEM ## MODEL FALLBACK SYSTEM
Priority: Claude > OpenAI > Gemini > Copilot > OpenCode Zen > Z.ai > Kimi > glm-4.7-free Priority: Claude > OpenAI > Gemini > Copilot > OpenCode Zen > Z.ai > Kimi > big-pickle
Agent-specific: librarian→ZAI, explore→Haiku/nano, hephaestus→requires OpenAI/Copilot Agent-specific: librarian→ZAI, explore→Haiku/nano, hephaestus→requires OpenAI/Copilot

View File

@@ -5,57 +5,57 @@ exports[`generateModelConfig no providers available returns ULTIMATE_FALLBACK fo
"$schema": "https://raw.githubusercontent.com/code-yeongyu/oh-my-opencode/master/assets/oh-my-opencode.schema.json", "$schema": "https://raw.githubusercontent.com/code-yeongyu/oh-my-opencode/master/assets/oh-my-opencode.schema.json",
"agents": { "agents": {
"atlas": { "atlas": {
"model": "opencode/glm-4.7-free", "model": "opencode/big-pickle",
}, },
"explore": { "explore": {
"model": "opencode/glm-4.7-free", "model": "opencode/big-pickle",
}, },
"hephaestus": { "hephaestus": {
"model": "opencode/glm-4.7-free", "model": "opencode/big-pickle",
}, },
"librarian": { "librarian": {
"model": "opencode/glm-4.7-free", "model": "opencode/big-pickle",
}, },
"metis": { "metis": {
"model": "opencode/glm-4.7-free", "model": "opencode/big-pickle",
}, },
"momus": { "momus": {
"model": "opencode/glm-4.7-free", "model": "opencode/big-pickle",
}, },
"multimodal-looker": { "multimodal-looker": {
"model": "opencode/glm-4.7-free", "model": "opencode/big-pickle",
}, },
"oracle": { "oracle": {
"model": "opencode/glm-4.7-free", "model": "opencode/big-pickle",
}, },
"prometheus": { "prometheus": {
"model": "opencode/glm-4.7-free", "model": "opencode/big-pickle",
}, },
}, },
"categories": { "categories": {
"artistry": { "artistry": {
"model": "opencode/glm-4.7-free", "model": "opencode/big-pickle",
}, },
"deep": { "deep": {
"model": "opencode/glm-4.7-free", "model": "opencode/big-pickle",
}, },
"quick": { "quick": {
"model": "opencode/glm-4.7-free", "model": "opencode/big-pickle",
}, },
"ultrabrain": { "ultrabrain": {
"model": "opencode/glm-4.7-free", "model": "opencode/big-pickle",
}, },
"unspecified-high": { "unspecified-high": {
"model": "opencode/glm-4.7-free", "model": "opencode/big-pickle",
}, },
"unspecified-low": { "unspecified-low": {
"model": "opencode/glm-4.7-free", "model": "opencode/big-pickle",
}, },
"visual-engineering": { "visual-engineering": {
"model": "opencode/glm-4.7-free", "model": "opencode/big-pickle",
}, },
"writing": { "writing": {
"model": "opencode/glm-4.7-free", "model": "opencode/big-pickle",
}, },
}, },
} }
@@ -205,7 +205,7 @@ exports[`generateModelConfig single native provider uses OpenAI models when only
"variant": "medium", "variant": "medium",
}, },
"librarian": { "librarian": {
"model": "opencode/glm-4.7-free", "model": "opencode/big-pickle",
}, },
"metis": { "metis": {
"model": "openai/gpt-5.2", "model": "openai/gpt-5.2",
@@ -233,7 +233,7 @@ exports[`generateModelConfig single native provider uses OpenAI models when only
"variant": "medium", "variant": "medium",
}, },
"quick": { "quick": {
"model": "opencode/glm-4.7-free", "model": "opencode/big-pickle",
}, },
"ultrabrain": { "ultrabrain": {
"model": "openai/gpt-5.3-codex", "model": "openai/gpt-5.3-codex",
@@ -248,10 +248,10 @@ exports[`generateModelConfig single native provider uses OpenAI models when only
"variant": "medium", "variant": "medium",
}, },
"visual-engineering": { "visual-engineering": {
"model": "opencode/glm-4.7-free", "model": "opencode/big-pickle",
}, },
"writing": { "writing": {
"model": "opencode/glm-4.7-free", "model": "opencode/big-pickle",
}, },
}, },
} }
@@ -272,7 +272,7 @@ exports[`generateModelConfig single native provider uses OpenAI models with isMa
"variant": "medium", "variant": "medium",
}, },
"librarian": { "librarian": {
"model": "opencode/glm-4.7-free", "model": "opencode/big-pickle",
}, },
"metis": { "metis": {
"model": "openai/gpt-5.2", "model": "openai/gpt-5.2",
@@ -300,7 +300,7 @@ exports[`generateModelConfig single native provider uses OpenAI models with isMa
"variant": "medium", "variant": "medium",
}, },
"quick": { "quick": {
"model": "opencode/glm-4.7-free", "model": "opencode/big-pickle",
}, },
"ultrabrain": { "ultrabrain": {
"model": "openai/gpt-5.3-codex", "model": "openai/gpt-5.3-codex",
@@ -315,10 +315,10 @@ exports[`generateModelConfig single native provider uses OpenAI models with isMa
"variant": "medium", "variant": "medium",
}, },
"visual-engineering": { "visual-engineering": {
"model": "opencode/glm-4.7-free", "model": "opencode/big-pickle",
}, },
"writing": { "writing": {
"model": "opencode/glm-4.7-free", "model": "opencode/big-pickle",
}, },
}, },
} }
@@ -335,7 +335,7 @@ exports[`generateModelConfig single native provider uses Gemini models when only
"model": "opencode/gpt-5-nano", "model": "opencode/gpt-5-nano",
}, },
"librarian": { "librarian": {
"model": "opencode/glm-4.7-free", "model": "opencode/big-pickle",
}, },
"metis": { "metis": {
"model": "google/gemini-3-pro", "model": "google/gemini-3-pro",
@@ -396,7 +396,7 @@ exports[`generateModelConfig single native provider uses Gemini models with isMa
"model": "opencode/gpt-5-nano", "model": "opencode/gpt-5-nano",
}, },
"librarian": { "librarian": {
"model": "opencode/glm-4.7-free", "model": "opencode/big-pickle",
}, },
"metis": { "metis": {
"model": "google/gemini-3-pro", "model": "google/gemini-3-pro",
@@ -614,7 +614,7 @@ exports[`generateModelConfig fallback providers uses OpenCode Zen models when on
"variant": "medium", "variant": "medium",
}, },
"librarian": { "librarian": {
"model": "opencode/glm-4.7-free", "model": "opencode/big-pickle",
}, },
"metis": { "metis": {
"model": "opencode/claude-opus-4-6", "model": "opencode/claude-opus-4-6",
@@ -688,7 +688,7 @@ exports[`generateModelConfig fallback providers uses OpenCode Zen models with is
"variant": "medium", "variant": "medium",
}, },
"librarian": { "librarian": {
"model": "opencode/glm-4.7-free", "model": "opencode/big-pickle",
}, },
"metis": { "metis": {
"model": "opencode/claude-opus-4-6", "model": "opencode/claude-opus-4-6",
@@ -902,7 +902,7 @@ exports[`generateModelConfig fallback providers uses ZAI model for librarian whe
"$schema": "https://raw.githubusercontent.com/code-yeongyu/oh-my-opencode/master/assets/oh-my-opencode.schema.json", "$schema": "https://raw.githubusercontent.com/code-yeongyu/oh-my-opencode/master/assets/oh-my-opencode.schema.json",
"agents": { "agents": {
"atlas": { "atlas": {
"model": "opencode/glm-4.7-free", "model": "opencode/big-pickle",
}, },
"explore": { "explore": {
"model": "opencode/gpt-5-nano", "model": "opencode/gpt-5-nano",
@@ -911,19 +911,19 @@ exports[`generateModelConfig fallback providers uses ZAI model for librarian whe
"model": "zai-coding-plan/glm-4.7", "model": "zai-coding-plan/glm-4.7",
}, },
"metis": { "metis": {
"model": "opencode/glm-4.7-free", "model": "opencode/big-pickle",
}, },
"momus": { "momus": {
"model": "opencode/glm-4.7-free", "model": "opencode/big-pickle",
}, },
"multimodal-looker": { "multimodal-looker": {
"model": "zai-coding-plan/glm-4.6v", "model": "zai-coding-plan/glm-4.6v",
}, },
"oracle": { "oracle": {
"model": "opencode/glm-4.7-free", "model": "opencode/big-pickle",
}, },
"prometheus": { "prometheus": {
"model": "opencode/glm-4.7-free", "model": "opencode/big-pickle",
}, },
"sisyphus": { "sisyphus": {
"model": "zai-coding-plan/glm-4.7", "model": "zai-coding-plan/glm-4.7",
@@ -931,22 +931,22 @@ exports[`generateModelConfig fallback providers uses ZAI model for librarian whe
}, },
"categories": { "categories": {
"quick": { "quick": {
"model": "opencode/glm-4.7-free", "model": "opencode/big-pickle",
}, },
"ultrabrain": { "ultrabrain": {
"model": "opencode/glm-4.7-free", "model": "opencode/big-pickle",
}, },
"unspecified-high": { "unspecified-high": {
"model": "opencode/glm-4.7-free", "model": "opencode/big-pickle",
}, },
"unspecified-low": { "unspecified-low": {
"model": "opencode/glm-4.7-free", "model": "opencode/big-pickle",
}, },
"visual-engineering": { "visual-engineering": {
"model": "zai-coding-plan/glm-5", "model": "zai-coding-plan/glm-5",
}, },
"writing": { "writing": {
"model": "opencode/glm-4.7-free", "model": "opencode/big-pickle",
}, },
}, },
} }
@@ -957,7 +957,7 @@ exports[`generateModelConfig fallback providers uses ZAI model for librarian wit
"$schema": "https://raw.githubusercontent.com/code-yeongyu/oh-my-opencode/master/assets/oh-my-opencode.schema.json", "$schema": "https://raw.githubusercontent.com/code-yeongyu/oh-my-opencode/master/assets/oh-my-opencode.schema.json",
"agents": { "agents": {
"atlas": { "atlas": {
"model": "opencode/glm-4.7-free", "model": "opencode/big-pickle",
}, },
"explore": { "explore": {
"model": "opencode/gpt-5-nano", "model": "opencode/gpt-5-nano",
@@ -966,19 +966,19 @@ exports[`generateModelConfig fallback providers uses ZAI model for librarian wit
"model": "zai-coding-plan/glm-4.7", "model": "zai-coding-plan/glm-4.7",
}, },
"metis": { "metis": {
"model": "opencode/glm-4.7-free", "model": "opencode/big-pickle",
}, },
"momus": { "momus": {
"model": "opencode/glm-4.7-free", "model": "opencode/big-pickle",
}, },
"multimodal-looker": { "multimodal-looker": {
"model": "zai-coding-plan/glm-4.6v", "model": "zai-coding-plan/glm-4.6v",
}, },
"oracle": { "oracle": {
"model": "opencode/glm-4.7-free", "model": "opencode/big-pickle",
}, },
"prometheus": { "prometheus": {
"model": "opencode/glm-4.7-free", "model": "opencode/big-pickle",
}, },
"sisyphus": { "sisyphus": {
"model": "zai-coding-plan/glm-4.7", "model": "zai-coding-plan/glm-4.7",
@@ -986,22 +986,22 @@ exports[`generateModelConfig fallback providers uses ZAI model for librarian wit
}, },
"categories": { "categories": {
"quick": { "quick": {
"model": "opencode/glm-4.7-free", "model": "opencode/big-pickle",
}, },
"ultrabrain": { "ultrabrain": {
"model": "opencode/glm-4.7-free", "model": "opencode/big-pickle",
}, },
"unspecified-high": { "unspecified-high": {
"model": "opencode/glm-4.7-free", "model": "opencode/big-pickle",
}, },
"unspecified-low": { "unspecified-low": {
"model": "opencode/glm-4.7-free", "model": "opencode/big-pickle",
}, },
"visual-engineering": { "visual-engineering": {
"model": "zai-coding-plan/glm-5", "model": "zai-coding-plan/glm-5",
}, },
"writing": { "writing": {
"model": "opencode/glm-4.7-free", "model": "opencode/big-pickle",
}, },
}, },
} }
@@ -1022,7 +1022,7 @@ exports[`generateModelConfig mixed provider scenarios uses Claude + OpenCode Zen
"variant": "medium", "variant": "medium",
}, },
"librarian": { "librarian": {
"model": "opencode/glm-4.7-free", "model": "opencode/big-pickle",
}, },
"metis": { "metis": {
"model": "anthropic/claude-opus-4-6", "model": "anthropic/claude-opus-4-6",

View File

@@ -130,7 +130,7 @@ export async function runCliInstaller(args: InstallArgs, version: string): Promi
!config.hasCopilot && !config.hasCopilot &&
!config.hasOpencodeZen !config.hasOpencodeZen
) { ) {
printWarning("No model providers configured. Using opencode/glm-4.7-free as fallback.") printWarning("No model providers configured. Using opencode/big-pickle as fallback.")
} }
console.log(`${SYMBOLS.star} ${color.bold(color.green(isUpdate ? "Configuration updated!" : "Installation complete!"))}`) console.log(`${SYMBOLS.star} ${color.bold(color.green(isUpdate ? "Configuration updated!" : "Installation complete!"))}`)

View File

@@ -18,7 +18,7 @@ export type { GeneratedOmoConfig } from "./model-fallback-types"
const ZAI_MODEL = "zai-coding-plan/glm-4.7" const ZAI_MODEL = "zai-coding-plan/glm-4.7"
const ULTIMATE_FALLBACK = "opencode/glm-4.7-free" const ULTIMATE_FALLBACK = "opencode/big-pickle"
const SCHEMA_URL = "https://raw.githubusercontent.com/code-yeongyu/oh-my-opencode/master/assets/oh-my-opencode.schema.json" const SCHEMA_URL = "https://raw.githubusercontent.com/code-yeongyu/oh-my-opencode/master/assets/oh-my-opencode.schema.json"

View File

@@ -32,7 +32,7 @@ export async function promptInstallConfig(detected: DetectedConfig): Promise<Ins
const claude = await selectOrCancel<ClaudeSubscription>({ const claude = await selectOrCancel<ClaudeSubscription>({
message: "Do you have a Claude Pro/Max subscription?", message: "Do you have a Claude Pro/Max subscription?",
options: [ options: [
{ value: "no", label: "No", hint: "Will use opencode/glm-4.7-free as fallback" }, { value: "no", label: "No", hint: "Will use opencode/big-pickle as fallback" },
{ value: "yes", label: "Yes (standard)", hint: "Claude Opus 4.5 for orchestration" }, { value: "yes", label: "Yes (standard)", hint: "Claude Opus 4.5 for orchestration" },
{ value: "max20", label: "Yes (max20 mode)", hint: "Full power with Claude Sonnet 4.6 for Librarian" }, { value: "max20", label: "Yes (max20 mode)", hint: "Full power with Claude Sonnet 4.6 for Librarian" },
], ],

View File

@@ -98,7 +98,7 @@ export async function runTuiInstaller(args: InstallArgs, version: string): Promi
} }
if (!config.hasClaude && !config.hasOpenAI && !config.hasGemini && !config.hasCopilot && !config.hasOpencodeZen) { if (!config.hasClaude && !config.hasOpenAI && !config.hasGemini && !config.hasCopilot && !config.hasOpencodeZen) {
p.log.warn("No model providers configured. Using opencode/glm-4.7-free as fallback.") p.log.warn("No model providers configured. Using opencode/big-pickle as fallback.")
} }
p.note(formatConfigSummary(config), isUpdate ? "Updated Configuration" : "Installation Complete") p.note(formatConfigSummary(config), isUpdate ? "Updated Configuration" : "Installation Complete")

View File

@@ -46,7 +46,7 @@ describe("Agent Config Integration", () => {
const config = { const config = {
sisyphus: { model: "anthropic/claude-opus-4-6" }, sisyphus: { model: "anthropic/claude-opus-4-6" },
oracle: { model: "openai/gpt-5.2" }, oracle: { model: "openai/gpt-5.2" },
librarian: { model: "opencode/glm-4.7-free" }, librarian: { model: "opencode/big-pickle" },
} }
// when - migration is applied // when - migration is applied
@@ -65,7 +65,7 @@ describe("Agent Config Integration", () => {
Sisyphus: { model: "anthropic/claude-opus-4-6" }, Sisyphus: { model: "anthropic/claude-opus-4-6" },
oracle: { model: "openai/gpt-5.2" }, oracle: { model: "openai/gpt-5.2" },
"Prometheus (Planner)": { model: "anthropic/claude-opus-4-6" }, "Prometheus (Planner)": { model: "anthropic/claude-opus-4-6" },
librarian: { model: "opencode/glm-4.7-free" }, librarian: { model: "opencode/big-pickle" },
} }
// when - migration is applied // when - migration is applied

View File

@@ -251,7 +251,7 @@ describe("fuzzyMatchModel", () => {
it("should match github-copilot claude-opus-4-6 to claude-opus-4.6", () => { it("should match github-copilot claude-opus-4-6 to claude-opus-4.6", () => {
const available = new Set([ const available = new Set([
"github-copilot/claude-opus-4.6", "github-copilot/claude-opus-4.6",
"opencode/glm-4.7-free", "opencode/big-pickle",
]) ])
const result = fuzzyMatchModel("claude-opus-4-6", available, ["github-copilot"]) const result = fuzzyMatchModel("claude-opus-4-6", available, ["github-copilot"])
expect(result).toBe("github-copilot/claude-opus-4.6") expect(result).toBe("github-copilot/claude-opus-4.6")
@@ -327,16 +327,16 @@ describe("fuzzyMatchModel", () => {
expect(result).toBe("anthropic/claude-opus-4-6") expect(result).toBe("anthropic/claude-opus-4-6")
}) })
// given available models with similar model IDs (e.g., glm-4.7 and glm-4.7-free) // given available models with similar model IDs (e.g., glm-4.7 and big-pickle)
// when searching for the longer variant (glm-4.7-free) // when searching for the longer variant (big-pickle)
// then return exact model ID match, not the shorter one // then return exact model ID match, not the shorter one
it("should prefer exact model ID match over shorter substring match", () => { it("should prefer exact model ID match over shorter substring match", () => {
const available = new Set([ const available = new Set([
"zai-coding-plan/glm-4.7", "zai-coding-plan/glm-4.7",
"zai-coding-plan/glm-4.7-free", "zai-coding-plan/big-pickle",
]) ])
const result = fuzzyMatchModel("glm-4.7-free", available) const result = fuzzyMatchModel("big-pickle", available)
expect(result).toBe("zai-coding-plan/glm-4.7-free") expect(result).toBe("zai-coding-plan/big-pickle")
}) })
// given available models with similar model IDs // given available models with similar model IDs
@@ -345,7 +345,7 @@ describe("fuzzyMatchModel", () => {
it("should still prefer shorter match when searching for shorter variant", () => { it("should still prefer shorter match when searching for shorter variant", () => {
const available = new Set([ const available = new Set([
"zai-coding-plan/glm-4.7", "zai-coding-plan/glm-4.7",
"zai-coding-plan/glm-4.7-free", "zai-coding-plan/big-pickle",
]) ])
const result = fuzzyMatchModel("glm-4.7", available) const result = fuzzyMatchModel("glm-4.7", available)
expect(result).toBe("zai-coding-plan/glm-4.7") expect(result).toBe("zai-coding-plan/glm-4.7")
@@ -690,13 +690,13 @@ describe("fetchAvailableModels with provider-models cache (whitelist-filtered)",
it("should prefer provider-models cache over models.json", async () => { it("should prefer provider-models cache over models.json", async () => {
writeProviderModelsCache({ writeProviderModelsCache({
models: { models: {
opencode: ["glm-4.7-free", "gpt-5-nano"], opencode: ["big-pickle", "gpt-5-nano"],
anthropic: ["claude-opus-4-6"] anthropic: ["claude-opus-4-6"]
}, },
connected: ["opencode", "anthropic"] connected: ["opencode", "anthropic"]
}) })
writeModelsCache({ writeModelsCache({
opencode: { models: { "glm-4.7-free": {}, "gpt-5-nano": {}, "gpt-5.2": {} } }, opencode: { models: { "big-pickle": {}, "gpt-5-nano": {}, "gpt-5.2": {} } },
anthropic: { models: { "claude-opus-4-6": {}, "claude-sonnet-4-6": {} } } anthropic: { models: { "claude-opus-4-6": {}, "claude-sonnet-4-6": {} } }
}) })
@@ -705,7 +705,7 @@ describe("fetchAvailableModels with provider-models cache (whitelist-filtered)",
}) })
expect(result.size).toBe(3) expect(result.size).toBe(3)
expect(result.has("opencode/glm-4.7-free")).toBe(true) expect(result.has("opencode/big-pickle")).toBe(true)
expect(result.has("opencode/gpt-5-nano")).toBe(true) expect(result.has("opencode/gpt-5-nano")).toBe(true)
expect(result.has("anthropic/claude-opus-4-6")).toBe(true) expect(result.has("anthropic/claude-opus-4-6")).toBe(true)
expect(result.has("opencode/gpt-5.2")).toBe(false) expect(result.has("opencode/gpt-5.2")).toBe(false)
@@ -738,7 +738,7 @@ describe("fetchAvailableModels with provider-models cache (whitelist-filtered)",
// then falls back to models.json (no whitelist filtering) // then falls back to models.json (no whitelist filtering)
it("should fallback to models.json when provider-models cache not found", async () => { it("should fallback to models.json when provider-models cache not found", async () => {
writeModelsCache({ writeModelsCache({
opencode: { models: { "glm-4.7-free": {}, "gpt-5-nano": {}, "gpt-5.2": {} } }, opencode: { models: { "big-pickle": {}, "gpt-5-nano": {}, "gpt-5.2": {} } },
}) })
const result = await fetchAvailableModels(undefined, { const result = await fetchAvailableModels(undefined, {
@@ -746,7 +746,7 @@ describe("fetchAvailableModels with provider-models cache (whitelist-filtered)",
}) })
expect(result.size).toBe(3) expect(result.size).toBe(3)
expect(result.has("opencode/glm-4.7-free")).toBe(true) expect(result.has("opencode/big-pickle")).toBe(true)
expect(result.has("opencode/gpt-5-nano")).toBe(true) expect(result.has("opencode/gpt-5-nano")).toBe(true)
expect(result.has("opencode/gpt-5.2")).toBe(true) expect(result.has("opencode/gpt-5.2")).toBe(true)
}) })
@@ -757,7 +757,7 @@ describe("fetchAvailableModels with provider-models cache (whitelist-filtered)",
it("should filter by connectedProviders even with provider-models cache", async () => { it("should filter by connectedProviders even with provider-models cache", async () => {
writeProviderModelsCache({ writeProviderModelsCache({
models: { models: {
opencode: ["glm-4.7-free"], opencode: ["big-pickle"],
anthropic: ["claude-opus-4-6"], anthropic: ["claude-opus-4-6"],
google: ["gemini-3-pro"] google: ["gemini-3-pro"]
}, },
@ -769,7 +769,7 @@ describe("fetchAvailableModels with provider-models cache (whitelist-filtered)",
}) })
expect(result.size).toBe(1) expect(result.size).toBe(1)
expect(result.has("opencode/glm-4.7-free")).toBe(true) expect(result.has("opencode/big-pickle")).toBe(true)
expect(result.has("anthropic/claude-opus-4-6")).toBe(false) expect(result.has("anthropic/claude-opus-4-6")).toBe(false)
expect(result.has("google/gemini-3-pro")).toBe(false) expect(result.has("google/gemini-3-pro")).toBe(false)
}) })

View File

@@ -81,7 +81,7 @@ export function fuzzyMatchModel(
} }
// Priority 2: Exact model ID match (part after provider/) // Priority 2: Exact model ID match (part after provider/)
// This ensures "glm-4.7-free" matches "zai-coding-plan/glm-4.7-free" over "zai-coding-plan/glm-4.7" // This ensures "big-pickle" matches "zai-coding-plan/big-pickle" over "zai-coding-plan/glm-4.7"
// Use filter + shortest to handle multi-provider cases (e.g., openai/gpt-5.2 + opencode/gpt-5.2) // Use filter + shortest to handle multi-provider cases (e.g., openai/gpt-5.2 + opencode/gpt-5.2)
const exactModelIdMatches = matches.filter((model) => { const exactModelIdMatches = matches.filter((model) => {
const modelId = model.split("/").slice(1).join("/") const modelId = model.split("/").slice(1).join("/")

View File

@@ -28,7 +28,7 @@ describe("AGENT_MODEL_REQUIREMENTS", () => {
const sisyphus = AGENT_MODEL_REQUIREMENTS["sisyphus"] const sisyphus = AGENT_MODEL_REQUIREMENTS["sisyphus"]
// #when - accessing Sisyphus requirement // #when - accessing Sisyphus requirement
// #then - fallbackChain has claude-opus-4-6 first, glm-4.7-free last // #then - fallbackChain has claude-opus-4-6 first, big-pickle last
expect(sisyphus).toBeDefined() expect(sisyphus).toBeDefined()
expect(sisyphus.fallbackChain).toBeArray() expect(sisyphus.fallbackChain).toBeArray()
expect(sisyphus.fallbackChain).toHaveLength(5) expect(sisyphus.fallbackChain).toHaveLength(5)
@@ -41,7 +41,7 @@ describe("AGENT_MODEL_REQUIREMENTS", () => {
const last = sisyphus.fallbackChain[4] const last = sisyphus.fallbackChain[4]
expect(last.providers[0]).toBe("opencode") expect(last.providers[0]).toBe("opencode")
expect(last.model).toBe("glm-4.7-free") expect(last.model).toBe("big-pickle")
}) })
test("librarian has valid fallbackChain with glm-4.7 as primary", () => { test("librarian has valid fallbackChain with glm-4.7 as primary", () => {
@@ -404,7 +404,7 @@ describe("FallbackEntry type", () => {
// given - a FallbackEntry without variant // given - a FallbackEntry without variant
const entry: FallbackEntry = { const entry: FallbackEntry = {
providers: ["opencode", "anthropic"], providers: ["opencode", "anthropic"],
model: "glm-4.7-free", model: "big-pickle",
} }
// when - accessing variant // when - accessing variant
@@ -434,7 +434,7 @@ describe("ModelRequirement type", () => {
test("ModelRequirement variant is optional", () => { test("ModelRequirement variant is optional", () => {
// given - a ModelRequirement without top-level variant // given - a ModelRequirement without top-level variant
const requirement: ModelRequirement = { const requirement: ModelRequirement = {
fallbackChain: [{ providers: ["opencode"], model: "glm-4.7-free" }], fallbackChain: [{ providers: ["opencode"], model: "big-pickle" }],
} }
// when - accessing variant // when - accessing variant

View File

@@ -19,7 +19,7 @@ export const AGENT_MODEL_REQUIREMENTS: Record<string, ModelRequirement> = {
{ providers: ["kimi-for-coding"], model: "k2p5" }, { providers: ["kimi-for-coding"], model: "k2p5" },
{ providers: ["opencode"], model: "kimi-k2.5-free" }, { providers: ["opencode"], model: "kimi-k2.5-free" },
{ providers: ["zai-coding-plan"], model: "glm-4.7" }, { providers: ["zai-coding-plan"], model: "glm-4.7" },
{ providers: ["opencode"], model: "glm-4.7-free" }, { providers: ["opencode"], model: "big-pickle" },
], ],
requiresAnyModel: true, requiresAnyModel: true,
}, },
@@ -39,7 +39,7 @@ export const AGENT_MODEL_REQUIREMENTS: Record<string, ModelRequirement> = {
librarian: { librarian: {
fallbackChain: [ fallbackChain: [
{ providers: ["zai-coding-plan"], model: "glm-4.7" }, { providers: ["zai-coding-plan"], model: "glm-4.7" },
{ providers: ["opencode"], model: "glm-4.7-free" }, { providers: ["opencode"], model: "big-pickle" },
{ providers: ["anthropic", "github-copilot", "opencode"], model: "claude-sonnet-4-6" }, { providers: ["anthropic", "github-copilot", "opencode"], model: "claude-sonnet-4-6" },
], ],
}, },

View File

@@ -117,7 +117,7 @@ describe("resolveModelWithFallback", () => {
test("returns uiSelectedModel with override source when provided", () => { test("returns uiSelectedModel with override source when provided", () => {
// given // given
const input: ExtendedModelResolutionInput = { const input: ExtendedModelResolutionInput = {
uiSelectedModel: "opencode/glm-4.7-free", uiSelectedModel: "opencode/big-pickle",
userModel: "anthropic/claude-opus-4-6", userModel: "anthropic/claude-opus-4-6",
fallbackChain: [ fallbackChain: [
{ providers: ["anthropic", "github-copilot"], model: "claude-opus-4-6" }, { providers: ["anthropic", "github-copilot"], model: "claude-opus-4-6" },
@@ -130,15 +130,15 @@ describe("resolveModelWithFallback", () => {
const result = resolveModelWithFallback(input) const result = resolveModelWithFallback(input)
// then // then
expect(result!.model).toBe("opencode/glm-4.7-free") expect(result!.model).toBe("opencode/big-pickle")
expect(result!.source).toBe("override") expect(result!.source).toBe("override")
expect(logSpy).toHaveBeenCalledWith("Model resolved via UI selection", { model: "opencode/glm-4.7-free" }) expect(logSpy).toHaveBeenCalledWith("Model resolved via UI selection", { model: "opencode/big-pickle" })
}) })
test("UI selection takes priority over config override", () => { test("UI selection takes priority over config override", () => {
// given // given
const input: ExtendedModelResolutionInput = { const input: ExtendedModelResolutionInput = {
uiSelectedModel: "opencode/glm-4.7-free", uiSelectedModel: "opencode/big-pickle",
userModel: "anthropic/claude-opus-4-6", userModel: "anthropic/claude-opus-4-6",
availableModels: new Set(["anthropic/claude-opus-4-6"]), availableModels: new Set(["anthropic/claude-opus-4-6"]),
systemDefaultModel: "google/gemini-3-pro", systemDefaultModel: "google/gemini-3-pro",
@@ -148,7 +148,7 @@ describe("resolveModelWithFallback", () => {
const result = resolveModelWithFallback(input) const result = resolveModelWithFallback(input)
// then // then
expect(result!.model).toBe("opencode/glm-4.7-free") expect(result!.model).toBe("opencode/big-pickle")
expect(result!.source).toBe("override") expect(result!.source).toBe("override")
}) })