feat: make gpt-5.3-codex medium the primary model for multimodal-looker
GPT-5.3 Codex has strong multimodal capabilities. Promote it to the first candidate in the multimodal-looker fallback chain, with gemini-3-flash as the next fallback (matching the ULW pattern of gpt-5.3-codex -> gemini).
This commit is contained in:
parent
42641a9922
commit
1ef5c17c35
@ -44,6 +44,7 @@ export const CLI_AGENT_MODEL_REQUIREMENTS: Record<string, ModelRequirement> = {
|
|||||||
},
|
},
|
||||||
"multimodal-looker": {
|
"multimodal-looker": {
|
||||||
fallbackChain: [
|
fallbackChain: [
|
||||||
|
{ providers: ["openai", "opencode"], model: "gpt-5.3-codex", variant: "medium" },
|
||||||
{ providers: ["google", "github-copilot", "opencode"], model: "gemini-3-flash" },
|
{ providers: ["google", "github-copilot", "opencode"], model: "gemini-3-flash" },
|
||||||
{ providers: ["openai", "github-copilot", "opencode"], model: "gpt-5.2" },
|
{ providers: ["openai", "github-copilot", "opencode"], model: "gpt-5.2" },
|
||||||
{ providers: ["zai-coding-plan"], model: "glm-4.6v" },
|
{ providers: ["zai-coding-plan"], model: "glm-4.6v" },
|
||||||
|
|||||||
@ -86,21 +86,25 @@ describe("AGENT_MODEL_REQUIREMENTS", () => {
|
|||||||
expect(quaternary.model).toBe("gpt-5-nano")
|
expect(quaternary.model).toBe("gpt-5-nano")
|
||||||
})
|
})
|
||||||
|
|
||||||
test("multimodal-looker has valid fallbackChain with kimi-k2.5-free as primary", () => {
|
test("multimodal-looker has valid fallbackChain with gpt-5.3-codex as primary", () => {
|
||||||
// given - multimodal-looker agent requirement
|
// given - multimodal-looker agent requirement
|
||||||
const multimodalLooker = AGENT_MODEL_REQUIREMENTS["multimodal-looker"]
|
const multimodalLooker = AGENT_MODEL_REQUIREMENTS["multimodal-looker"]
|
||||||
|
|
||||||
// when - accessing multimodal-looker requirement
|
// when - accessing multimodal-looker requirement
|
||||||
// then - fallbackChain exists with kimi-k2.5-free first, gpt-5-nano last
|
// then - fallbackChain exists with gpt-5.3-codex first, gemini-3-flash second, gpt-5-nano last
|
||||||
expect(multimodalLooker).toBeDefined()
|
expect(multimodalLooker).toBeDefined()
|
||||||
expect(multimodalLooker.fallbackChain).toBeArray()
|
expect(multimodalLooker.fallbackChain).toBeArray()
|
||||||
expect(multimodalLooker.fallbackChain).toHaveLength(5)
|
expect(multimodalLooker.fallbackChain).toHaveLength(6)
|
||||||
|
|
||||||
const primary = multimodalLooker.fallbackChain[0]
|
const primary = multimodalLooker.fallbackChain[0]
|
||||||
expect(primary.providers[0]).toBe("opencode")
|
expect(primary.providers).toEqual(["openai", "opencode"])
|
||||||
expect(primary.model).toBe("kimi-k2.5-free")
|
expect(primary.model).toBe("gpt-5.3-codex")
|
||||||
|
expect(primary.variant).toBe("medium")
|
||||||
|
|
||||||
const last = multimodalLooker.fallbackChain[4]
|
const secondary = multimodalLooker.fallbackChain[1]
|
||||||
|
expect(secondary.model).toBe("gemini-3-flash")
|
||||||
|
|
||||||
|
const last = multimodalLooker.fallbackChain[5]
|
||||||
expect(last.providers).toEqual(["openai", "github-copilot", "opencode"])
|
expect(last.providers).toEqual(["openai", "github-copilot", "opencode"])
|
||||||
expect(last.model).toBe("gpt-5-nano")
|
expect(last.model).toBe("gpt-5-nano")
|
||||||
})
|
})
|
||||||
|
|||||||
@ -53,8 +53,9 @@ export const AGENT_MODEL_REQUIREMENTS: Record<string, ModelRequirement> = {
|
|||||||
},
|
},
|
||||||
"multimodal-looker": {
|
"multimodal-looker": {
|
||||||
fallbackChain: [
|
fallbackChain: [
|
||||||
{ providers: ["opencode"], model: "kimi-k2.5-free" },
|
{ providers: ["openai", "opencode"], model: "gpt-5.3-codex", variant: "medium" },
|
||||||
{ providers: ["google", "github-copilot", "opencode"], model: "gemini-3-flash" },
|
{ providers: ["google", "github-copilot", "opencode"], model: "gemini-3-flash" },
|
||||||
|
{ providers: ["opencode"], model: "kimi-k2.5-free" },
|
||||||
{ providers: ["openai", "github-copilot", "opencode"], model: "gpt-5.2" },
|
{ providers: ["openai", "github-copilot", "opencode"], model: "gpt-5.2" },
|
||||||
{ providers: ["zai-coding-plan"], model: "glm-4.6v" },
|
{ providers: ["zai-coding-plan"], model: "glm-4.6v" },
|
||||||
{ providers: ["openai", "github-copilot", "opencode"], model: "gpt-5-nano" },
|
{ providers: ["openai", "github-copilot", "opencode"], model: "gpt-5-nano" },
|
||||||
|
|||||||
Loading…
x
Reference in New Issue
Block a user