refactor(librarian): switch fallback to minimax-m2.5-free → gemini-3-flash → big-pickle

This commit is contained in:
YeonGyu-Kim 2026-02-19 14:37:27 +09:00
parent c8eb0dbae3
commit b7c6391bd5
7 changed files with 38 additions and 38 deletions

View File

@ -72,7 +72,7 @@ exports[`generateModelConfig single native provider uses Claude models when only
"model": "anthropic/claude-haiku-4-5",
},
"librarian": {
"model": "anthropic/claude-sonnet-4-6",
"model": "opencode/minimax-m2.5-free",
},
"metis": {
"model": "anthropic/claude-opus-4-6",
@ -134,7 +134,7 @@ exports[`generateModelConfig single native provider uses Claude models with isMa
"model": "anthropic/claude-haiku-4-5",
},
"librarian": {
"model": "anthropic/claude-sonnet-4-6",
"model": "opencode/minimax-m2.5-free",
},
"metis": {
"model": "anthropic/claude-opus-4-6",
@ -201,7 +201,7 @@ exports[`generateModelConfig single native provider uses OpenAI models when only
"variant": "medium",
},
"librarian": {
"model": "opencode/big-pickle",
"model": "opencode/minimax-m2.5-free",
},
"metis": {
"model": "openai/gpt-5.2",
@ -268,7 +268,7 @@ exports[`generateModelConfig single native provider uses OpenAI models with isMa
"variant": "medium",
},
"librarian": {
"model": "opencode/big-pickle",
"model": "opencode/minimax-m2.5-free",
},
"metis": {
"model": "openai/gpt-5.2",
@ -331,7 +331,7 @@ exports[`generateModelConfig single native provider uses Gemini models when only
"model": "opencode/gpt-5-nano",
},
"librarian": {
"model": "opencode/big-pickle",
"model": "opencode/minimax-m2.5-free",
},
"metis": {
"model": "google/gemini-3-pro",
@ -392,7 +392,7 @@ exports[`generateModelConfig single native provider uses Gemini models with isMa
"model": "opencode/gpt-5-nano",
},
"librarian": {
"model": "opencode/big-pickle",
"model": "opencode/minimax-m2.5-free",
},
"metis": {
"model": "google/gemini-3-pro",
@ -457,7 +457,7 @@ exports[`generateModelConfig all native providers uses preferred models from fal
"variant": "medium",
},
"librarian": {
"model": "anthropic/claude-sonnet-4-6",
"model": "opencode/minimax-m2.5-free",
},
"metis": {
"model": "anthropic/claude-opus-4-6",
@ -531,7 +531,7 @@ exports[`generateModelConfig all native providers uses preferred models with isM
"variant": "medium",
},
"librarian": {
"model": "anthropic/claude-sonnet-4-6",
"model": "opencode/minimax-m2.5-free",
},
"metis": {
"model": "anthropic/claude-opus-4-6",
@ -606,7 +606,7 @@ exports[`generateModelConfig fallback providers uses OpenCode Zen models when on
"variant": "medium",
},
"librarian": {
"model": "opencode/big-pickle",
"model": "opencode/minimax-m2.5-free",
},
"metis": {
"model": "opencode/claude-opus-4-6",
@ -680,7 +680,7 @@ exports[`generateModelConfig fallback providers uses OpenCode Zen models with is
"variant": "medium",
},
"librarian": {
"model": "opencode/big-pickle",
"model": "opencode/minimax-m2.5-free",
},
"metis": {
"model": "opencode/claude-opus-4-6",
@ -755,7 +755,7 @@ exports[`generateModelConfig fallback providers uses GitHub Copilot models when
"variant": "medium",
},
"librarian": {
"model": "github-copilot/claude-sonnet-4.6",
"model": "opencode/minimax-m2.5-free",
},
"metis": {
"model": "github-copilot/claude-opus-4.6",
@ -829,7 +829,7 @@ exports[`generateModelConfig fallback providers uses GitHub Copilot models with
"variant": "medium",
},
"librarian": {
"model": "github-copilot/claude-sonnet-4.6",
"model": "opencode/minimax-m2.5-free",
},
"metis": {
"model": "github-copilot/claude-opus-4.6",
@ -900,7 +900,7 @@ exports[`generateModelConfig fallback providers uses ZAI model for librarian whe
"model": "opencode/gpt-5-nano",
},
"librarian": {
"model": "zai-coding-plan/glm-5",
"model": "opencode/minimax-m2.5-free",
},
"metis": {
"model": "opencode/big-pickle",
@ -955,7 +955,7 @@ exports[`generateModelConfig fallback providers uses ZAI model for librarian wit
"model": "opencode/gpt-5-nano",
},
"librarian": {
"model": "zai-coding-plan/glm-5",
"model": "opencode/minimax-m2.5-free",
},
"metis": {
"model": "opencode/big-pickle",
@ -1014,7 +1014,7 @@ exports[`generateModelConfig mixed provider scenarios uses Claude + OpenCode Zen
"variant": "medium",
},
"librarian": {
"model": "opencode/big-pickle",
"model": "opencode/minimax-m2.5-free",
},
"metis": {
"model": "anthropic/claude-opus-4-6",
@ -1088,7 +1088,7 @@ exports[`generateModelConfig mixed provider scenarios uses OpenAI + Copilot comb
"variant": "medium",
},
"librarian": {
"model": "github-copilot/claude-sonnet-4.6",
"model": "opencode/minimax-m2.5-free",
},
"metis": {
"model": "github-copilot/claude-opus-4.6",
@ -1158,7 +1158,7 @@ exports[`generateModelConfig mixed provider scenarios uses Claude + ZAI combinat
"model": "anthropic/claude-haiku-4-5",
},
"librarian": {
"model": "zai-coding-plan/glm-5",
"model": "opencode/minimax-m2.5-free",
},
"metis": {
"model": "anthropic/claude-opus-4-6",
@ -1219,7 +1219,7 @@ exports[`generateModelConfig mixed provider scenarios uses Gemini + Claude combi
"model": "anthropic/claude-haiku-4-5",
},
"librarian": {
"model": "anthropic/claude-sonnet-4-6",
"model": "opencode/minimax-m2.5-free",
},
"metis": {
"model": "anthropic/claude-opus-4-6",
@ -1289,7 +1289,7 @@ exports[`generateModelConfig mixed provider scenarios uses all fallback provider
"variant": "medium",
},
"librarian": {
"model": "zai-coding-plan/glm-5",
"model": "opencode/minimax-m2.5-free",
},
"metis": {
"model": "github-copilot/claude-opus-4.6",
@ -1363,7 +1363,7 @@ exports[`generateModelConfig mixed provider scenarios uses all providers togethe
"variant": "medium",
},
"librarian": {
"model": "zai-coding-plan/glm-5",
"model": "opencode/minimax-m2.5-free",
},
"metis": {
"model": "anthropic/claude-opus-4-6",
@ -1437,7 +1437,7 @@ exports[`generateModelConfig mixed provider scenarios uses all providers with is
"variant": "medium",
},
"librarian": {
"model": "zai-coding-plan/glm-5",
"model": "opencode/minimax-m2.5-free",
},
"metis": {
"model": "anthropic/claude-opus-4-6",

View File

@ -44,7 +44,7 @@ Model Providers (Priority: Native > Copilot > OpenCode Zen > Z.ai > Kimi):
Gemini Native google/ models (Gemini 3 Pro, Flash)
Copilot github-copilot/ models (fallback)
OpenCode Zen opencode/ models (opencode/claude-opus-4-6, etc.)
Z.ai zai-coding-plan/glm-5 (Librarian priority)
Z.ai zai-coding-plan/glm-5 (visual-engineering fallback)
Kimi kimi-for-coding/k2p5 (Sisyphus/Prometheus fallback)
`)
.action(async (options) => {

View File

@ -281,7 +281,7 @@ describe("generateOmoConfig - model fallback system", () => {
expect((result.agents as Record<string, { model: string }>).sisyphus).toBeUndefined()
})
test("uses zai-coding-plan/glm-5 for librarian when Z.ai available", () => {
test("uses opencode/minimax-m2.5-free for librarian regardless of Z.ai", () => {
// #given user has Z.ai and Claude max20
const config: InstallConfig = {
hasClaude: true,
@ -297,8 +297,8 @@ describe("generateOmoConfig - model fallback system", () => {
// #when generating config
const result = generateOmoConfig(config)
// #then librarian should use zai-coding-plan/glm-5
expect((result.agents as Record<string, { model: string }>).librarian.model).toBe("zai-coding-plan/glm-5")
// #then librarian should use opencode/minimax-m2.5-free
expect((result.agents as Record<string, { model: string }>).librarian.model).toBe("opencode/minimax-m2.5-free")
// #then Sisyphus uses Claude (OR logic)
expect((result.agents as Record<string, { model: string }>).sisyphus.model).toBe("anthropic/claude-opus-4-6")
})

View File

@ -491,18 +491,18 @@ describe("generateModelConfig", () => {
const result = generateModelConfig(config)
// #then librarian should use LIBRARIAN_MODEL (opencode/minimax-m2.5-free)
expect(result.agents?.librarian?.model).toBe("zai-coding-plan/glm-5")
expect(result.agents?.librarian?.model).toBe("opencode/minimax-m2.5-free")
})
test("librarian uses claude-sonnet when ZAI not available but Claude is", () => {
test("librarian always uses minimax-m2.5-free regardless of provider availability", () => {
// #given only Claude is available (no ZAI)
const config = createConfig({ hasClaude: true })
// #when generateModelConfig is called
const result = generateModelConfig(config)
// #then librarian should use claude-sonnet-4-6 (third in fallback chain after ZAI and opencode/glm)
expect(result.agents?.librarian?.model).toBe("anthropic/claude-sonnet-4-6")
// #then librarian should use opencode/minimax-m2.5-free (always first in chain)
expect(result.agents?.librarian?.model).toBe("opencode/minimax-m2.5-free")
})
})

View File

@ -16,7 +16,7 @@ import {
export type { GeneratedOmoConfig } from "./model-fallback-types"
const ZAI_MODEL = "zai-coding-plan/glm-5"
const LIBRARIAN_MODEL = "opencode/minimax-m2.5-free"
const ULTIMATE_FALLBACK = "opencode/big-pickle"
const SCHEMA_URL = "https://raw.githubusercontent.com/code-yeongyu/oh-my-opencode/master/assets/oh-my-opencode.schema.json"
@ -52,8 +52,8 @@ export function generateModelConfig(config: InstallConfig): GeneratedOmoConfig {
const categories: Record<string, CategoryConfig> = {}
for (const [role, req] of Object.entries(AGENT_MODEL_REQUIREMENTS)) {
if (role === "librarian" && avail.zai) {
agents[role] = { model: ZAI_MODEL }
if (role === "librarian") {
agents[role] = { model: LIBRARIAN_MODEL }
continue
}

View File

@ -44,19 +44,19 @@ describe("AGENT_MODEL_REQUIREMENTS", () => {
expect(last.model).toBe("big-pickle")
})
test("librarian has valid fallbackChain with glm-5 as primary", () => {
test("librarian has valid fallbackChain with minimax-m2.5-free as primary", () => {
// given - librarian agent requirement
const librarian = AGENT_MODEL_REQUIREMENTS["librarian"]
// when - accessing librarian requirement
// then - fallbackChain exists with glm-5 as first entry
// then - fallbackChain exists with minimax-m2.5-free as first entry
expect(librarian).toBeDefined()
expect(librarian.fallbackChain).toBeArray()
expect(librarian.fallbackChain.length).toBeGreaterThan(0)
const primary = librarian.fallbackChain[0]
expect(primary.providers[0]).toBe("zai-coding-plan")
expect(primary.model).toBe("glm-5")
expect(primary.providers[0]).toBe("opencode")
expect(primary.model).toBe("minimax-m2.5-free")
})
test("explore has valid fallbackChain with grok-code-fast-1 as primary", () => {

View File

@ -38,9 +38,9 @@ export const AGENT_MODEL_REQUIREMENTS: Record<string, ModelRequirement> = {
},
librarian: {
fallbackChain: [
{ providers: ["zai-coding-plan"], model: "glm-5" },
{ providers: ["opencode"], model: "minimax-m2.5-free" },
{ providers: ["google", "github-copilot", "opencode"], model: "gemini-3-flash" },
{ providers: ["opencode"], model: "big-pickle" },
{ providers: ["anthropic", "github-copilot", "opencode"], model: "claude-sonnet-4-6" },
],
},
explore: {