fix(model-requirements): use supported variant for gemini-3-pro (#1463)

* fix(model-requirements): use supported variant for gemini-3-pro

* fix(delegate-task): update artistry variant to high for gemini-3-pro

- Update DEFAULT_CATEGORIES artistry variant from 'max' to 'high'
- Update related test comment
- gemini-3-pro only supports low/high thinking levels, not max
- Addresses Oracle review feedback
This commit is contained in:
YeonGyu-Kim 2026-02-04 11:26:17 +09:00 committed by GitHub
parent faae3d0f32
commit b954afca90
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
5 changed files with 40 additions and 40 deletions

View File

@@ -335,18 +335,18 @@ exports[`generateModelConfig single native provider uses Gemini models when only
}, },
"metis": { "metis": {
"model": "google/gemini-3-pro", "model": "google/gemini-3-pro",
"variant": "max", "variant": "high",
}, },
"momus": { "momus": {
"model": "google/gemini-3-pro", "model": "google/gemini-3-pro",
"variant": "max", "variant": "high",
}, },
"multimodal-looker": { "multimodal-looker": {
"model": "google/gemini-3-flash", "model": "google/gemini-3-flash",
}, },
"oracle": { "oracle": {
"model": "google/gemini-3-pro", "model": "google/gemini-3-pro",
"variant": "max", "variant": "high",
}, },
"prometheus": { "prometheus": {
"model": "google/gemini-3-pro", "model": "google/gemini-3-pro",
@@ -355,14 +355,14 @@ exports[`generateModelConfig single native provider uses Gemini models when only
"categories": { "categories": {
"artistry": { "artistry": {
"model": "google/gemini-3-pro", "model": "google/gemini-3-pro",
"variant": "max", "variant": "high",
}, },
"quick": { "quick": {
"model": "google/gemini-3-flash", "model": "google/gemini-3-flash",
}, },
"ultrabrain": { "ultrabrain": {
"model": "google/gemini-3-pro", "model": "google/gemini-3-pro",
"variant": "max", "variant": "high",
}, },
"unspecified-high": { "unspecified-high": {
"model": "google/gemini-3-flash", "model": "google/gemini-3-flash",
@@ -395,18 +395,18 @@ exports[`generateModelConfig single native provider uses Gemini models with isMa
}, },
"metis": { "metis": {
"model": "google/gemini-3-pro", "model": "google/gemini-3-pro",
"variant": "max", "variant": "high",
}, },
"momus": { "momus": {
"model": "google/gemini-3-pro", "model": "google/gemini-3-pro",
"variant": "max", "variant": "high",
}, },
"multimodal-looker": { "multimodal-looker": {
"model": "google/gemini-3-flash", "model": "google/gemini-3-flash",
}, },
"oracle": { "oracle": {
"model": "google/gemini-3-pro", "model": "google/gemini-3-pro",
"variant": "max", "variant": "high",
}, },
"prometheus": { "prometheus": {
"model": "google/gemini-3-pro", "model": "google/gemini-3-pro",
@@ -415,14 +415,14 @@ exports[`generateModelConfig single native provider uses Gemini models with isMa
"categories": { "categories": {
"artistry": { "artistry": {
"model": "google/gemini-3-pro", "model": "google/gemini-3-pro",
"variant": "max", "variant": "high",
}, },
"quick": { "quick": {
"model": "google/gemini-3-flash", "model": "google/gemini-3-flash",
}, },
"ultrabrain": { "ultrabrain": {
"model": "google/gemini-3-pro", "model": "google/gemini-3-pro",
"variant": "max", "variant": "high",
}, },
"unspecified-high": { "unspecified-high": {
"model": "google/gemini-3-pro", "model": "google/gemini-3-pro",
@@ -484,7 +484,7 @@ exports[`generateModelConfig all native providers uses preferred models from fal
"categories": { "categories": {
"artistry": { "artistry": {
"model": "google/gemini-3-pro", "model": "google/gemini-3-pro",
"variant": "max", "variant": "high",
}, },
"deep": { "deep": {
"model": "openai/gpt-5.2-codex", "model": "openai/gpt-5.2-codex",
@@ -557,7 +557,7 @@ exports[`generateModelConfig all native providers uses preferred models with isM
"categories": { "categories": {
"artistry": { "artistry": {
"model": "google/gemini-3-pro", "model": "google/gemini-3-pro",
"variant": "max", "variant": "high",
}, },
"deep": { "deep": {
"model": "openai/gpt-5.2-codex", "model": "openai/gpt-5.2-codex",
@@ -631,7 +631,7 @@ exports[`generateModelConfig fallback providers uses OpenCode Zen models when on
"categories": { "categories": {
"artistry": { "artistry": {
"model": "opencode/gemini-3-pro", "model": "opencode/gemini-3-pro",
"variant": "max", "variant": "high",
}, },
"deep": { "deep": {
"model": "opencode/gpt-5.2-codex", "model": "opencode/gpt-5.2-codex",
@@ -704,7 +704,7 @@ exports[`generateModelConfig fallback providers uses OpenCode Zen models with is
"categories": { "categories": {
"artistry": { "artistry": {
"model": "opencode/gemini-3-pro", "model": "opencode/gemini-3-pro",
"variant": "max", "variant": "high",
}, },
"deep": { "deep": {
"model": "opencode/gpt-5.2-codex", "model": "opencode/gpt-5.2-codex",
@@ -778,7 +778,7 @@ exports[`generateModelConfig fallback providers uses GitHub Copilot models when
"categories": { "categories": {
"artistry": { "artistry": {
"model": "github-copilot/gemini-3-pro-preview", "model": "github-copilot/gemini-3-pro-preview",
"variant": "max", "variant": "high",
}, },
"deep": { "deep": {
"model": "github-copilot/gpt-5.2-codex", "model": "github-copilot/gpt-5.2-codex",
@@ -851,7 +851,7 @@ exports[`generateModelConfig fallback providers uses GitHub Copilot models with
"categories": { "categories": {
"artistry": { "artistry": {
"model": "github-copilot/gemini-3-pro-preview", "model": "github-copilot/gemini-3-pro-preview",
"variant": "max", "variant": "high",
}, },
"deep": { "deep": {
"model": "github-copilot/gpt-5.2-codex", "model": "github-copilot/gpt-5.2-codex",
@@ -1035,7 +1035,7 @@ exports[`generateModelConfig mixed provider scenarios uses Claude + OpenCode Zen
"categories": { "categories": {
"artistry": { "artistry": {
"model": "opencode/gemini-3-pro", "model": "opencode/gemini-3-pro",
"variant": "max", "variant": "high",
}, },
"deep": { "deep": {
"model": "opencode/gpt-5.2-codex", "model": "opencode/gpt-5.2-codex",
@@ -1108,7 +1108,7 @@ exports[`generateModelConfig mixed provider scenarios uses OpenAI + Copilot comb
"categories": { "categories": {
"artistry": { "artistry": {
"model": "github-copilot/gemini-3-pro-preview", "model": "github-copilot/gemini-3-pro-preview",
"variant": "max", "variant": "high",
}, },
"deep": { "deep": {
"model": "openai/gpt-5.2-codex", "model": "openai/gpt-5.2-codex",
@@ -1225,7 +1225,7 @@ exports[`generateModelConfig mixed provider scenarios uses Gemini + Claude combi
}, },
"oracle": { "oracle": {
"model": "google/gemini-3-pro", "model": "google/gemini-3-pro",
"variant": "max", "variant": "high",
}, },
"prometheus": { "prometheus": {
"model": "anthropic/claude-opus-4-5", "model": "anthropic/claude-opus-4-5",
@@ -1239,14 +1239,14 @@ exports[`generateModelConfig mixed provider scenarios uses Gemini + Claude combi
"categories": { "categories": {
"artistry": { "artistry": {
"model": "google/gemini-3-pro", "model": "google/gemini-3-pro",
"variant": "max", "variant": "high",
}, },
"quick": { "quick": {
"model": "anthropic/claude-haiku-4-5", "model": "anthropic/claude-haiku-4-5",
}, },
"ultrabrain": { "ultrabrain": {
"model": "google/gemini-3-pro", "model": "google/gemini-3-pro",
"variant": "max", "variant": "high",
}, },
"unspecified-high": { "unspecified-high": {
"model": "anthropic/claude-sonnet-4-5", "model": "anthropic/claude-sonnet-4-5",
@@ -1308,7 +1308,7 @@ exports[`generateModelConfig mixed provider scenarios uses all fallback provider
"categories": { "categories": {
"artistry": { "artistry": {
"model": "github-copilot/gemini-3-pro-preview", "model": "github-copilot/gemini-3-pro-preview",
"variant": "max", "variant": "high",
}, },
"deep": { "deep": {
"model": "github-copilot/gpt-5.2-codex", "model": "github-copilot/gpt-5.2-codex",
@@ -1381,7 +1381,7 @@ exports[`generateModelConfig mixed provider scenarios uses all providers togethe
"categories": { "categories": {
"artistry": { "artistry": {
"model": "google/gemini-3-pro", "model": "google/gemini-3-pro",
"variant": "max", "variant": "high",
}, },
"deep": { "deep": {
"model": "openai/gpt-5.2-codex", "model": "openai/gpt-5.2-codex",
@@ -1454,7 +1454,7 @@ exports[`generateModelConfig mixed provider scenarios uses all providers with is
"categories": { "categories": {
"artistry": { "artistry": {
"model": "google/gemini-3-pro", "model": "google/gemini-3-pro",
"variant": "max", "variant": "high",
}, },
"deep": { "deep": {
"model": "openai/gpt-5.2-codex", "model": "openai/gpt-5.2-codex",

View File

@@ -313,7 +313,7 @@ describe("CATEGORY_MODEL_REQUIREMENTS", () => {
const primary = artistry.fallbackChain[0] const primary = artistry.fallbackChain[0]
expect(primary.model).toBe("gemini-3-pro") expect(primary.model).toBe("gemini-3-pro")
expect(primary.variant).toBe("max") expect(primary.variant).toBe("high")
expect(primary.providers[0]).toBe("google") expect(primary.providers[0]).toBe("google")
}) })

View File

@@ -31,7 +31,7 @@ export const AGENT_MODEL_REQUIREMENTS: Record<string, ModelRequirement> = {
oracle: { oracle: {
fallbackChain: [ fallbackChain: [
{ providers: ["openai", "github-copilot", "opencode"], model: "gpt-5.2", variant: "high" }, { providers: ["openai", "github-copilot", "opencode"], model: "gpt-5.2", variant: "high" },
{ providers: ["google", "github-copilot", "opencode"], model: "gemini-3-pro", variant: "max" }, { providers: ["google", "github-copilot", "opencode"], model: "gemini-3-pro", variant: "high" },
{ providers: ["anthropic", "github-copilot", "opencode"], model: "claude-opus-4-5", variant: "max" }, { providers: ["anthropic", "github-copilot", "opencode"], model: "claude-opus-4-5", variant: "max" },
], ],
}, },
@@ -75,14 +75,14 @@ export const AGENT_MODEL_REQUIREMENTS: Record<string, ModelRequirement> = {
{ providers: ["kimi-for-coding"], model: "k2p5" }, { providers: ["kimi-for-coding"], model: "k2p5" },
{ providers: ["opencode"], model: "kimi-k2.5-free" }, { providers: ["opencode"], model: "kimi-k2.5-free" },
{ providers: ["openai", "github-copilot", "opencode"], model: "gpt-5.2", variant: "high" }, { providers: ["openai", "github-copilot", "opencode"], model: "gpt-5.2", variant: "high" },
{ providers: ["google", "github-copilot", "opencode"], model: "gemini-3-pro", variant: "max" }, { providers: ["google", "github-copilot", "opencode"], model: "gemini-3-pro", variant: "high" },
], ],
}, },
momus: { momus: {
fallbackChain: [ fallbackChain: [
{ providers: ["openai", "github-copilot", "opencode"], model: "gpt-5.2", variant: "medium" }, { providers: ["openai", "github-copilot", "opencode"], model: "gpt-5.2", variant: "medium" },
{ providers: ["anthropic", "github-copilot", "opencode"], model: "claude-opus-4-5", variant: "max" }, { providers: ["anthropic", "github-copilot", "opencode"], model: "claude-opus-4-5", variant: "max" },
{ providers: ["google", "github-copilot", "opencode"], model: "gemini-3-pro", variant: "max" }, { providers: ["google", "github-copilot", "opencode"], model: "gemini-3-pro", variant: "high" },
], ],
}, },
atlas: { atlas: {
@@ -107,7 +107,7 @@ export const CATEGORY_MODEL_REQUIREMENTS: Record<string, ModelRequirement> = {
ultrabrain: { ultrabrain: {
fallbackChain: [ fallbackChain: [
{ providers: ["openai", "github-copilot", "opencode"], model: "gpt-5.2-codex", variant: "xhigh" }, { providers: ["openai", "github-copilot", "opencode"], model: "gpt-5.2-codex", variant: "xhigh" },
{ providers: ["google", "github-copilot", "opencode"], model: "gemini-3-pro", variant: "max" }, { providers: ["google", "github-copilot", "opencode"], model: "gemini-3-pro", variant: "high" },
{ providers: ["anthropic", "github-copilot", "opencode"], model: "claude-opus-4-5", variant: "max" }, { providers: ["anthropic", "github-copilot", "opencode"], model: "claude-opus-4-5", variant: "max" },
], ],
}, },
@@ -115,13 +115,13 @@ export const CATEGORY_MODEL_REQUIREMENTS: Record<string, ModelRequirement> = {
fallbackChain: [ fallbackChain: [
{ providers: ["openai", "github-copilot", "opencode"], model: "gpt-5.2-codex", variant: "medium" }, { providers: ["openai", "github-copilot", "opencode"], model: "gpt-5.2-codex", variant: "medium" },
{ providers: ["anthropic", "github-copilot", "opencode"], model: "claude-opus-4-5", variant: "max" }, { providers: ["anthropic", "github-copilot", "opencode"], model: "claude-opus-4-5", variant: "max" },
{ providers: ["google", "github-copilot", "opencode"], model: "gemini-3-pro", variant: "max" }, { providers: ["google", "github-copilot", "opencode"], model: "gemini-3-pro", variant: "high" },
], ],
requiresModel: "gpt-5.2-codex", requiresModel: "gpt-5.2-codex",
}, },
artistry: { artistry: {
fallbackChain: [ fallbackChain: [
{ providers: ["google", "github-copilot", "opencode"], model: "gemini-3-pro", variant: "max" }, { providers: ["google", "github-copilot", "opencode"], model: "gemini-3-pro", variant: "high" },
{ providers: ["anthropic", "github-copilot", "opencode"], model: "claude-opus-4-5", variant: "max" }, { providers: ["anthropic", "github-copilot", "opencode"], model: "claude-opus-4-5", variant: "max" },
{ providers: ["openai", "github-copilot", "opencode"], model: "gpt-5.2" }, { providers: ["openai", "github-copilot", "opencode"], model: "gpt-5.2" },
], ],

View File

@@ -196,7 +196,7 @@ export const DEFAULT_CATEGORIES: Record<string, CategoryConfig> = {
"visual-engineering": { model: "google/gemini-3-pro" }, "visual-engineering": { model: "google/gemini-3-pro" },
ultrabrain: { model: "openai/gpt-5.2-codex", variant: "xhigh" }, ultrabrain: { model: "openai/gpt-5.2-codex", variant: "xhigh" },
deep: { model: "openai/gpt-5.2-codex", variant: "medium" }, deep: { model: "openai/gpt-5.2-codex", variant: "medium" },
artistry: { model: "google/gemini-3-pro", variant: "max" }, artistry: { model: "google/gemini-3-pro", variant: "high" },
quick: { model: "anthropic/claude-haiku-4-5" }, quick: { model: "anthropic/claude-haiku-4-5" },
"unspecified-low": { model: "anthropic/claude-sonnet-4-5" }, "unspecified-low": { model: "anthropic/claude-sonnet-4-5" },
"unspecified-high": { model: "anthropic/claude-opus-4-5", variant: "max" }, "unspecified-high": { model: "anthropic/claude-opus-4-5", variant: "max" },

View File

@@ -1492,7 +1492,7 @@ describe("sisyphus-task", () => {
abort: new AbortController().signal, abort: new AbortController().signal,
} }
// when - artistry category (gemini-3-pro with max variant) // when - artistry category (gemini-3-pro with high variant)
const result = await tool.execute( const result = await tool.execute(
{ {
description: "Test artistry forced background", description: "Test artistry forced background",