chore: upgrade claude-sonnet-4-5 to claude-sonnet-4-6 across codebase

This commit is contained in:
YeonGyu-Kim 2026-02-18 15:51:24 +09:00
parent 3f16057a4b
commit 96ff1e00cc
46 changed files with 201 additions and 200 deletions

View File

@ -135,14 +135,14 @@ jobs:
"limit": { "context": 190000, "output": 128000 },
"options": { "effort": "high", "thinking": { "type": "enabled", "budgetTokens": 64000 } }
},
"claude-sonnet-4-5": {
"id": "claude-sonnet-4-5-20250929",
"name": "Sonnet 4.5",
"claude-sonnet-4-6": {
"id": "claude-sonnet-4-6-20250929",
"name": "Sonnet 4.6",
"limit": { "context": 200000, "output": 64000 }
},
"claude-sonnet-4-5-high": {
"id": "claude-sonnet-4-5-20250929",
"name": "Sonnet 4.5 High",
"claude-sonnet-4-6-high": {
"id": "claude-sonnet-4-6-20250929",
"name": "Sonnet 4.6 High",
"limit": { "context": 200000, "output": 128000 },
"options": { "thinking": { "type": "enabled", "budgetTokens": 64000 } }
},

View File

@ -177,7 +177,7 @@ import type { AgentConfig } from "./types";
export const myAgent: AgentConfig = {
name: "my-agent",
model: "anthropic/claude-sonnet-4-5",
model: "anthropic/claude-sonnet-4-6",
description: "Description of what this agent does",
prompt: `Your agent's system prompt here`,
temperature: 0.1,

View File

@ -26,7 +26,7 @@ A Category is an agent configuration preset optimized for specific domains.
| `deep` | `openai/gpt-5.3-codex` (medium) | Goal-oriented autonomous problem-solving. Thorough research before action. For hairy problems requiring deep understanding. |
| `artistry` | `google/gemini-3-pro` (max) | Highly creative/artistic tasks, novel ideas |
| `quick` | `anthropic/claude-haiku-4-5` | Trivial tasks - single file changes, typo fixes, simple modifications |
| `unspecified-low` | `anthropic/claude-sonnet-4-5` | Tasks that don't fit other categories, low effort required |
| `unspecified-low` | `anthropic/claude-sonnet-4-6` | Tasks that don't fit other categories, low effort required |
| `unspecified-high` | `anthropic/claude-opus-4-6` (max) | Tasks that don't fit other categories, high effort required |
| `writing` | `google/gemini-3-flash` | Documentation, prose, technical writing |

View File

@ -665,7 +665,7 @@ You can also customize Sisyphus agents like other agents:
"model": "openai/gpt-5.2"
},
"Metis (Plan Consultant)": {
"model": "anthropic/claude-sonnet-4-5"
"model": "anthropic/claude-sonnet-4-6"
}
}
}
@ -729,7 +729,7 @@ All 8 categories come with optimal model defaults, but **you must configure them
| `deep` | `openai/gpt-5.3-codex` (medium) | Goal-oriented autonomous problem-solving, thorough research before action |
| `artistry` | `google/gemini-3-pro` (high) | Highly creative/artistic tasks, novel ideas |
| `quick` | `anthropic/claude-haiku-4-5` | Trivial tasks - single file changes, typo fixes, simple modifications |
| `unspecified-low` | `anthropic/claude-sonnet-4-5` | Tasks that don't fit other categories, low effort required |
| `unspecified-low` | `anthropic/claude-sonnet-4-6` | Tasks that don't fit other categories, low effort required |
| `unspecified-high` | `anthropic/claude-opus-4-6` (max) | Tasks that don't fit other categories, high effort required |
| `writing` | `kimi-for-coding/k2p5` | Documentation, prose, technical writing |
@ -747,12 +747,12 @@ All 8 categories come with optimal model defaults, but **you must configure them
```json
// opencode.json
{ "model": "anthropic/claude-sonnet-4-5" }
{ "model": "anthropic/claude-sonnet-4-6" }
// oh-my-opencode.json (empty categories section)
{}
// Result: ALL categories use claude-sonnet-4-5 (wasteful!)
// Result: ALL categories use claude-sonnet-4-6 (wasteful!)
// - quick tasks use Sonnet instead of Haiku (expensive)
// - ultrabrain uses Sonnet instead of GPT-5.2 (inferior reasoning)
// - visual tasks use Sonnet instead of Gemini (suboptimal for UI)
@ -784,7 +784,7 @@ All 8 categories come with optimal model defaults, but **you must configure them
"model": "anthropic/claude-haiku-4-5" // Fast + cheap for trivial tasks
},
"unspecified-low": {
"model": "anthropic/claude-sonnet-4-5"
"model": "anthropic/claude-sonnet-4-6"
},
"unspecified-high": {
"model": "anthropic/claude-opus-4-6",
@ -818,7 +818,7 @@ Add your own categories or override built-in ones:
{
"categories": {
"data-science": {
"model": "anthropic/claude-sonnet-4-5",
"model": "anthropic/claude-sonnet-4-6",
"temperature": 0.2,
"prompt_append": "Focus on data analysis, ML pipelines, and statistical methods."
},
@ -921,7 +921,7 @@ Categories follow the same resolution logic:
| **deep** | `gpt-5.3-codex` | openai/github-copilot/opencode → anthropic/github-copilot/opencode → google/github-copilot/opencode |
| **artistry** | `gemini-3-pro` | google/github-copilot/opencode → anthropic/github-copilot/opencode → openai/github-copilot/opencode |
| **quick** | `claude-haiku-4-5` | anthropic/github-copilot/opencode → google/github-copilot/opencode → opencode |
| **unspecified-low** | `claude-sonnet-4-5` | anthropic/github-copilot/opencode → openai/github-copilot/opencode → google/github-copilot/opencode |
| **unspecified-low** | `claude-sonnet-4-6` | anthropic/github-copilot/opencode → openai/github-copilot/opencode → google/github-copilot/opencode |
| **unspecified-high** | `claude-opus-4-6` | anthropic/github-copilot/opencode → openai/github-copilot/opencode → google/github-copilot/opencode |
| **writing** | `k2p5` | kimi-for-coding → google/github-copilot/opencode → anthropic/github-copilot/opencode |
@ -947,7 +947,7 @@ Override any agent or category model in `oh-my-opencode.json`:
{
"agents": {
"Sisyphus": {
"model": "anthropic/claude-sonnet-4-5"
"model": "anthropic/claude-sonnet-4-6"
},
"oracle": {
"model": "openai/o3"

View File

@ -13,7 +13,7 @@ Oh-My-OpenCode provides 11 specialized AI agents. Each has distinct expertise, o
| **Sisyphus** | `anthropic/claude-opus-4-6` | **The default orchestrator.** Plans, delegates, and executes complex tasks using specialized subagents with aggressive parallel execution. Todo-driven workflow with extended thinking (32k budget). Fallback: k2p5 → kimi-k2.5-free → glm-4.7 → glm-4.7-free. |
| **Hephaestus** | `openai/gpt-5.3-codex` | **The Legitimate Craftsman.** Autonomous deep worker inspired by AmpCode's deep mode. Goal-oriented execution with thorough research before action. Explores codebase patterns, completes tasks end-to-end without premature stopping. Named after the Greek god of forge and craftsmanship. Requires gpt-5.3-codex (no fallback - only activates when this model is available). |
| **oracle** | `openai/gpt-5.2` | Architecture decisions, code review, debugging. Read-only consultation - stellar logical reasoning and deep analysis. Inspired by AmpCode. |
| **librarian** | `zai-coding-plan/glm-4.7` | Multi-repo analysis, documentation lookup, OSS implementation examples. Deep codebase understanding with evidence-based answers. Fallback: glm-4.7-free → claude-sonnet-4-5. |
| **librarian** | `zai-coding-plan/glm-4.7` | Multi-repo analysis, documentation lookup, OSS implementation examples. Deep codebase understanding with evidence-based answers. Fallback: glm-4.7-free → claude-sonnet-4-6. |
| **explore** | `github-copilot/grok-code-fast-1` | Fast codebase exploration and contextual grep. Fallback: claude-haiku-4-5 → gpt-5-nano. |
| **multimodal-looker** | `google/gemini-3-flash` | Visual content specialist. Analyzes PDFs, images, diagrams to extract information. Fallback: gpt-5.2 → glm-4.6v → k2p5 → kimi-k2.5-free → claude-haiku-4-5 → gpt-5-nano. |

View File

@ -162,8 +162,8 @@ The `opencode-antigravity-auth` plugin uses different model names than the built
**Available models (Antigravity quota)**:
- `google/antigravity-gemini-3-pro` — variants: `low`, `high`
- `google/antigravity-gemini-3-flash` — variants: `minimal`, `low`, `medium`, `high`
- `google/antigravity-claude-sonnet-4-5` — no variants
- `google/antigravity-claude-sonnet-4-5-thinking` — variants: `low`, `max`
- `google/antigravity-claude-sonnet-4-6` — no variants
- `google/antigravity-claude-sonnet-4-6-thinking` — variants: `low`, `max`
- `google/antigravity-claude-opus-4-5-thinking` — variants: `low`, `max`
**Available models (Gemini CLI quota)**:

View File

@ -128,7 +128,7 @@ Here's a real-world config for a user with **Claude, OpenAI, Gemini, and Z.ai**
"$schema": "https://raw.githubusercontent.com/code-yeongyu/oh-my-opencode/master/assets/oh-my-opencode.schema.json",
"agents": {
// Override specific agents only - rest use fallback chain
"atlas": { "model": "anthropic/claude-sonnet-4-5", "variant": "max" },
"atlas": { "model": "anthropic/claude-sonnet-4-6", "variant": "max" },
"librarian": { "model": "zai-coding-plan/glm-4.7" },
"explore": { "model": "opencode/gpt-5-nano" },
"multimodal-looker": { "model": "zai-coding-plan/glm-4.6v" }

View File

@ -33,7 +33,7 @@ flowchart TB
end
subgraph Workers["Worker Layer (Specialized Agents)"]
Junior["🪨 Sisyphus-Junior<br/>(Task Executor)<br/>Claude Sonnet 4.5"]
Junior["🪨 Sisyphus-Junior<br/>(Task Executor)<br/>Claude Sonnet 4.6"]
Oracle["🧠 Oracle<br/>(Architecture)<br/>GPT-5.2"]
Explore["🔍 Explore<br/>(Codebase Grep)<br/>Grok Code"]
Librarian["📚 Librarian<br/>(Docs/OSS)<br/>GLM-4.7"]
@ -298,7 +298,7 @@ task(category="quick", prompt="...") // "Just get it done fast"
| `artistry` | Gemini 3 Pro (max) | Highly creative/artistic tasks, novel ideas |
| `quick` | Claude Haiku 4.5 | Trivial tasks - single file changes, typo fixes |
| `deep` | GPT-5.3 Codex (medium) | Goal-oriented autonomous problem-solving, thorough research |
| `unspecified-low` | Claude Sonnet 4.5 | Tasks that don't fit other categories, low effort |
| `unspecified-low` | Claude Sonnet 4.6 | Tasks that don't fit other categories, low effort |
| `unspecified-high` | Claude Opus 4.6 (max) | Tasks that don't fit other categories, high effort |
| `writing` | K2P5 (Kimi) | Documentation, prose, technical writing |

View File

@ -294,7 +294,7 @@ flowchart TD
### ⚡ Atlas (The Plan Executor)
- **Model**: `anthropic/claude-sonnet-4-5` (Extended Thinking 32k)
- **Model**: `anthropic/claude-sonnet-4-6` (Extended Thinking 32k)
- **Role**: Execution and delegation
- **Characteristic**: Doesn't do everything directly, actively delegates to specialized agents (Frontend, Librarian, etc.).

View File

@ -13,14 +13,14 @@ Agent factories following `createXXXAgent(model) → AgentConfig` pattern. Each
| **Sisyphus** | claude-opus-4-6 | 0.1 | primary | kimi-k2.5 → glm-4.7 → gemini-3-pro | Main orchestrator, plans + delegates |
| **Hephaestus** | gpt-5.3-codex | 0.1 | primary | NONE (required) | Autonomous deep worker |
| **Oracle** | gpt-5.2 | 0.1 | subagent | claude-opus-4-6 → gemini-3-pro | Read-only consultation |
| **Librarian** | glm-4.7 | 0.1 | subagent | glm-4.7-free → claude-sonnet-4-5 | External docs/code search |
| **Librarian** | glm-4.7 | 0.1 | subagent | glm-4.7-free → claude-sonnet-4-6 | External docs/code search |
| **Explore** | grok-code-fast-1 | 0.1 | subagent | claude-haiku-4-5 → gpt-5-nano | Contextual grep |
| **Multimodal-Looker** | gemini-3-flash | 0.1 | subagent | gpt-5.2 → glm-4.6v → ... (6 deep) | PDF/image analysis |
| **Metis** | claude-opus-4-6 | **0.3** | subagent | kimi-k2.5 → gpt-5.2 → gemini-3-pro | Pre-planning consultant |
| **Momus** | gpt-5.2 | 0.1 | subagent | claude-opus-4-6 → gemini-3-pro | Plan reviewer |
| **Atlas** | claude-sonnet-4-5 | 0.1 | primary | kimi-k2.5 → gpt-5.2 → gemini-3-pro | Todo-list orchestrator |
| **Atlas** | claude-sonnet-4-6 | 0.1 | primary | kimi-k2.5 → gpt-5.2 → gemini-3-pro | Todo-list orchestrator |
| **Prometheus** | claude-opus-4-6 | 0.1 | — | kimi-k2.5 → gpt-5.2 → gemini-3-pro | Strategic planner (internal) |
| **Sisyphus-Junior** | claude-sonnet-4-5 | 0.1 | all | user-configurable | Category-spawned executor |
| **Sisyphus-Junior** | claude-sonnet-4-6 | 0.1 | all | user-configurable | Category-spawned executor |
## TOOL RESTRICTIONS

View File

@ -28,7 +28,7 @@ const MODE: AgentMode = "subagent"
const BLOCKED_TOOLS = ["task"]
export const SISYPHUS_JUNIOR_DEFAULTS = {
model: "anthropic/claude-sonnet-4-5",
model: "anthropic/claude-sonnet-4-6",
temperature: 0.1,
} as const

View File

@ -203,7 +203,7 @@ describe("createSisyphusJuniorAgentWithOverrides", () => {
describe("useTaskSystem integration", () => {
test("useTaskSystem=true produces Task_Discipline prompt for Claude", () => {
//#given
const override = { model: "anthropic/claude-sonnet-4-5" }
const override = { model: "anthropic/claude-sonnet-4-6" }
//#when
const result = createSisyphusJuniorAgentWithOverrides(override, undefined, true)
@ -241,7 +241,7 @@ describe("createSisyphusJuniorAgentWithOverrides", () => {
test("useTaskSystem=true includes task_create/task_update in Claude prompt", () => {
//#given
const override = { model: "anthropic/claude-sonnet-4-5" }
const override = { model: "anthropic/claude-sonnet-4-6" }
//#when
const result = createSisyphusJuniorAgentWithOverrides(override, undefined, true)
@ -265,7 +265,7 @@ describe("createSisyphusJuniorAgentWithOverrides", () => {
test("useTaskSystem=false uses todowrite instead of task_create", () => {
//#given
const override = { model: "anthropic/claude-sonnet-4-5" }
const override = { model: "anthropic/claude-sonnet-4-6" }
//#when
const result = createSisyphusJuniorAgentWithOverrides(override, undefined, false)
@ -291,7 +291,7 @@ describe("createSisyphusJuniorAgentWithOverrides", () => {
test("Claude model uses default prompt with discipline section", () => {
// given
const override = { model: "anthropic/claude-sonnet-4-5" }
const override = { model: "anthropic/claude-sonnet-4-6" }
// when
const result = createSisyphusJuniorAgentWithOverrides(override)
@ -355,7 +355,7 @@ describe("getSisyphusJuniorPromptSource", () => {
test("returns 'default' for Claude models", () => {
// given
const model = "anthropic/claude-sonnet-4-5"
const model = "anthropic/claude-sonnet-4-6"
// when
const source = getSisyphusJuniorPromptSource(model)
@ -393,7 +393,7 @@ describe("buildSisyphusJuniorPrompt", () => {
test("Claude model prompt contains Claude-specific sections", () => {
// given
const model = "anthropic/claude-sonnet-4-5"
const model = "anthropic/claude-sonnet-4-6"
// when
const prompt = buildSisyphusJuniorPrompt(model, false)
@ -418,7 +418,7 @@ describe("buildSisyphusJuniorPrompt", () => {
test("useTaskSystem=false includes Todo_Discipline for Claude", () => {
// given
const model = "anthropic/claude-sonnet-4-5"
const model = "anthropic/claude-sonnet-4-6"
// when
const prompt = buildSisyphusJuniorPrompt(model, false)

View File

@ -34,7 +34,7 @@ describe("isGptModel", () => {
test("claude models are not gpt", () => {
expect(isGptModel("anthropic/claude-opus-4-6")).toBe(false);
expect(isGptModel("anthropic/claude-sonnet-4-5")).toBe(false);
expect(isGptModel("anthropic/claude-sonnet-4-6")).toBe(false);
expect(isGptModel("litellm/anthropic.claude-opus-4-5")).toBe(false);
});

View File

@ -54,7 +54,7 @@ describe("createBuiltinAgents with model overrides", () => {
test("Atlas uses uiSelectedModel when provided", async () => {
// #given
const fetchSpy = spyOn(shared, "fetchAvailableModels").mockResolvedValue(
new Set(["openai/gpt-5.2", "anthropic/claude-sonnet-4-5"])
new Set(["openai/gpt-5.2", "anthropic/claude-sonnet-4-6"])
)
const uiSelectedModel = "openai/gpt-5.2"
@ -84,7 +84,7 @@ describe("createBuiltinAgents with model overrides", () => {
test("user config model takes priority over uiSelectedModel for sisyphus", async () => {
// #given
const fetchSpy = spyOn(shared, "fetchAvailableModels").mockResolvedValue(
new Set(["openai/gpt-5.2", "anthropic/claude-sonnet-4-5"])
new Set(["openai/gpt-5.2", "anthropic/claude-sonnet-4-6"])
)
const uiSelectedModel = "openai/gpt-5.2"
const overrides = {
@ -117,7 +117,7 @@ describe("createBuiltinAgents with model overrides", () => {
test("user config model takes priority over uiSelectedModel for atlas", async () => {
// #given
const fetchSpy = spyOn(shared, "fetchAvailableModels").mockResolvedValue(
new Set(["openai/gpt-5.2", "anthropic/claude-sonnet-4-5"])
new Set(["openai/gpt-5.2", "anthropic/claude-sonnet-4-6"])
)
const uiSelectedModel = "openai/gpt-5.2"
const overrides = {

View File

@ -66,13 +66,13 @@ exports[`generateModelConfig single native provider uses Claude models when only
"$schema": "https://raw.githubusercontent.com/code-yeongyu/oh-my-opencode/master/assets/oh-my-opencode.schema.json",
"agents": {
"atlas": {
"model": "anthropic/claude-sonnet-4-5",
"model": "anthropic/claude-sonnet-4-6",
},
"explore": {
"model": "anthropic/claude-haiku-4-5",
},
"librarian": {
"model": "anthropic/claude-sonnet-4-5",
"model": "anthropic/claude-sonnet-4-6",
},
"metis": {
"model": "anthropic/claude-opus-4-6",
@ -107,17 +107,17 @@ exports[`generateModelConfig single native provider uses Claude models when only
"variant": "max",
},
"unspecified-high": {
"model": "anthropic/claude-sonnet-4-5",
"model": "anthropic/claude-sonnet-4-6",
},
"unspecified-low": {
"model": "anthropic/claude-sonnet-4-5",
"model": "anthropic/claude-sonnet-4-6",
},
"visual-engineering": {
"model": "anthropic/claude-opus-4-6",
"variant": "max",
},
"writing": {
"model": "anthropic/claude-sonnet-4-5",
"model": "anthropic/claude-sonnet-4-6",
},
},
}
@ -128,13 +128,13 @@ exports[`generateModelConfig single native provider uses Claude models with isMa
"$schema": "https://raw.githubusercontent.com/code-yeongyu/oh-my-opencode/master/assets/oh-my-opencode.schema.json",
"agents": {
"atlas": {
"model": "anthropic/claude-sonnet-4-5",
"model": "anthropic/claude-sonnet-4-6",
},
"explore": {
"model": "anthropic/claude-haiku-4-5",
},
"librarian": {
"model": "anthropic/claude-sonnet-4-5",
"model": "anthropic/claude-sonnet-4-6",
},
"metis": {
"model": "anthropic/claude-opus-4-6",
@ -173,14 +173,14 @@ exports[`generateModelConfig single native provider uses Claude models with isMa
"variant": "max",
},
"unspecified-low": {
"model": "anthropic/claude-sonnet-4-5",
"model": "anthropic/claude-sonnet-4-6",
},
"visual-engineering": {
"model": "anthropic/claude-opus-4-6",
"variant": "max",
},
"writing": {
"model": "anthropic/claude-sonnet-4-5",
"model": "anthropic/claude-sonnet-4-6",
},
},
}
@ -447,7 +447,7 @@ exports[`generateModelConfig all native providers uses preferred models from fal
"$schema": "https://raw.githubusercontent.com/code-yeongyu/oh-my-opencode/master/assets/oh-my-opencode.schema.json",
"agents": {
"atlas": {
"model": "anthropic/claude-sonnet-4-5",
"model": "anthropic/claude-sonnet-4-6",
},
"explore": {
"model": "anthropic/claude-haiku-4-5",
@ -457,7 +457,7 @@ exports[`generateModelConfig all native providers uses preferred models from fal
"variant": "medium",
},
"librarian": {
"model": "anthropic/claude-sonnet-4-5",
"model": "anthropic/claude-sonnet-4-6",
},
"metis": {
"model": "anthropic/claude-opus-4-6",
@ -500,10 +500,10 @@ exports[`generateModelConfig all native providers uses preferred models from fal
"variant": "xhigh",
},
"unspecified-high": {
"model": "anthropic/claude-sonnet-4-5",
"model": "anthropic/claude-sonnet-4-6",
},
"unspecified-low": {
"model": "anthropic/claude-sonnet-4-5",
"model": "anthropic/claude-sonnet-4-6",
},
"visual-engineering": {
"model": "google/gemini-3-pro",
@ -521,7 +521,7 @@ exports[`generateModelConfig all native providers uses preferred models with isM
"$schema": "https://raw.githubusercontent.com/code-yeongyu/oh-my-opencode/master/assets/oh-my-opencode.schema.json",
"agents": {
"atlas": {
"model": "anthropic/claude-sonnet-4-5",
"model": "anthropic/claude-sonnet-4-6",
},
"explore": {
"model": "anthropic/claude-haiku-4-5",
@ -531,7 +531,7 @@ exports[`generateModelConfig all native providers uses preferred models with isM
"variant": "medium",
},
"librarian": {
"model": "anthropic/claude-sonnet-4-5",
"model": "anthropic/claude-sonnet-4-6",
},
"metis": {
"model": "anthropic/claude-opus-4-6",
@ -578,7 +578,7 @@ exports[`generateModelConfig all native providers uses preferred models with isM
"variant": "max",
},
"unspecified-low": {
"model": "anthropic/claude-sonnet-4-5",
"model": "anthropic/claude-sonnet-4-6",
},
"visual-engineering": {
"model": "google/gemini-3-pro",
@ -649,10 +649,10 @@ exports[`generateModelConfig fallback providers uses OpenCode Zen models when on
"variant": "xhigh",
},
"unspecified-high": {
"model": "opencode/claude-sonnet-4-5",
"model": "opencode/claude-sonnet-4-6",
},
"unspecified-low": {
"model": "opencode/claude-sonnet-4-5",
"model": "opencode/claude-sonnet-4-6",
},
"visual-engineering": {
"model": "opencode/gemini-3-pro",
@ -727,7 +727,7 @@ exports[`generateModelConfig fallback providers uses OpenCode Zen models with is
"variant": "max",
},
"unspecified-low": {
"model": "opencode/claude-sonnet-4-5",
"model": "opencode/claude-sonnet-4-6",
},
"visual-engineering": {
"model": "opencode/gemini-3-pro",
@ -745,7 +745,7 @@ exports[`generateModelConfig fallback providers uses GitHub Copilot models when
"$schema": "https://raw.githubusercontent.com/code-yeongyu/oh-my-opencode/master/assets/oh-my-opencode.schema.json",
"agents": {
"atlas": {
"model": "github-copilot/claude-sonnet-4.5",
"model": "github-copilot/claude-sonnet-4.6",
},
"explore": {
"model": "github-copilot/gpt-5-mini",
@ -755,7 +755,7 @@ exports[`generateModelConfig fallback providers uses GitHub Copilot models when
"variant": "medium",
},
"librarian": {
"model": "github-copilot/claude-sonnet-4.5",
"model": "github-copilot/claude-sonnet-4.6",
},
"metis": {
"model": "github-copilot/claude-opus-4.6",
@ -798,10 +798,10 @@ exports[`generateModelConfig fallback providers uses GitHub Copilot models when
"variant": "xhigh",
},
"unspecified-high": {
"model": "github-copilot/claude-sonnet-4.5",
"model": "github-copilot/claude-sonnet-4.6",
},
"unspecified-low": {
"model": "github-copilot/claude-sonnet-4.5",
"model": "github-copilot/claude-sonnet-4.6",
},
"visual-engineering": {
"model": "github-copilot/gemini-3-pro-preview",
@ -819,7 +819,7 @@ exports[`generateModelConfig fallback providers uses GitHub Copilot models with
"$schema": "https://raw.githubusercontent.com/code-yeongyu/oh-my-opencode/master/assets/oh-my-opencode.schema.json",
"agents": {
"atlas": {
"model": "github-copilot/claude-sonnet-4.5",
"model": "github-copilot/claude-sonnet-4.6",
},
"explore": {
"model": "github-copilot/gpt-5-mini",
@ -829,7 +829,7 @@ exports[`generateModelConfig fallback providers uses GitHub Copilot models with
"variant": "medium",
},
"librarian": {
"model": "github-copilot/claude-sonnet-4.5",
"model": "github-copilot/claude-sonnet-4.6",
},
"metis": {
"model": "github-copilot/claude-opus-4.6",
@ -876,7 +876,7 @@ exports[`generateModelConfig fallback providers uses GitHub Copilot models with
"variant": "max",
},
"unspecified-low": {
"model": "github-copilot/claude-sonnet-4.5",
"model": "github-copilot/claude-sonnet-4.6",
},
"visual-engineering": {
"model": "github-copilot/gemini-3-pro-preview",
@ -1057,10 +1057,10 @@ exports[`generateModelConfig mixed provider scenarios uses Claude + OpenCode Zen
"variant": "xhigh",
},
"unspecified-high": {
"model": "anthropic/claude-sonnet-4-5",
"model": "anthropic/claude-sonnet-4-6",
},
"unspecified-low": {
"model": "anthropic/claude-sonnet-4-5",
"model": "anthropic/claude-sonnet-4-6",
},
"visual-engineering": {
"model": "opencode/gemini-3-pro",
@ -1078,7 +1078,7 @@ exports[`generateModelConfig mixed provider scenarios uses OpenAI + Copilot comb
"$schema": "https://raw.githubusercontent.com/code-yeongyu/oh-my-opencode/master/assets/oh-my-opencode.schema.json",
"agents": {
"atlas": {
"model": "github-copilot/claude-sonnet-4.5",
"model": "github-copilot/claude-sonnet-4.6",
},
"explore": {
"model": "github-copilot/gpt-5-mini",
@ -1088,7 +1088,7 @@ exports[`generateModelConfig mixed provider scenarios uses OpenAI + Copilot comb
"variant": "medium",
},
"librarian": {
"model": "github-copilot/claude-sonnet-4.5",
"model": "github-copilot/claude-sonnet-4.6",
},
"metis": {
"model": "github-copilot/claude-opus-4.6",
@ -1131,10 +1131,10 @@ exports[`generateModelConfig mixed provider scenarios uses OpenAI + Copilot comb
"variant": "xhigh",
},
"unspecified-high": {
"model": "github-copilot/claude-sonnet-4.5",
"model": "github-copilot/claude-sonnet-4.6",
},
"unspecified-low": {
"model": "github-copilot/claude-sonnet-4.5",
"model": "github-copilot/claude-sonnet-4.6",
},
"visual-engineering": {
"model": "github-copilot/gemini-3-pro-preview",
@ -1152,7 +1152,7 @@ exports[`generateModelConfig mixed provider scenarios uses Claude + ZAI combinat
"$schema": "https://raw.githubusercontent.com/code-yeongyu/oh-my-opencode/master/assets/oh-my-opencode.schema.json",
"agents": {
"atlas": {
"model": "anthropic/claude-sonnet-4-5",
"model": "anthropic/claude-sonnet-4-6",
},
"explore": {
"model": "anthropic/claude-haiku-4-5",
@ -1193,16 +1193,16 @@ exports[`generateModelConfig mixed provider scenarios uses Claude + ZAI combinat
"variant": "max",
},
"unspecified-high": {
"model": "anthropic/claude-sonnet-4-5",
"model": "anthropic/claude-sonnet-4-6",
},
"unspecified-low": {
"model": "anthropic/claude-sonnet-4-5",
"model": "anthropic/claude-sonnet-4-6",
},
"visual-engineering": {
"model": "zai-coding-plan/glm-5",
},
"writing": {
"model": "anthropic/claude-sonnet-4-5",
"model": "anthropic/claude-sonnet-4-6",
},
},
}
@ -1213,13 +1213,13 @@ exports[`generateModelConfig mixed provider scenarios uses Gemini + Claude combi
"$schema": "https://raw.githubusercontent.com/code-yeongyu/oh-my-opencode/master/assets/oh-my-opencode.schema.json",
"agents": {
"atlas": {
"model": "anthropic/claude-sonnet-4-5",
"model": "anthropic/claude-sonnet-4-6",
},
"explore": {
"model": "anthropic/claude-haiku-4-5",
},
"librarian": {
"model": "anthropic/claude-sonnet-4-5",
"model": "anthropic/claude-sonnet-4-6",
},
"metis": {
"model": "anthropic/claude-opus-4-6",
@ -1258,10 +1258,10 @@ exports[`generateModelConfig mixed provider scenarios uses Gemini + Claude combi
"variant": "high",
},
"unspecified-high": {
"model": "anthropic/claude-sonnet-4-5",
"model": "anthropic/claude-sonnet-4-6",
},
"unspecified-low": {
"model": "anthropic/claude-sonnet-4-5",
"model": "anthropic/claude-sonnet-4-6",
},
"visual-engineering": {
"model": "google/gemini-3-pro",
@ -1332,10 +1332,10 @@ exports[`generateModelConfig mixed provider scenarios uses all fallback provider
"variant": "xhigh",
},
"unspecified-high": {
"model": "github-copilot/claude-sonnet-4.5",
"model": "github-copilot/claude-sonnet-4.6",
},
"unspecified-low": {
"model": "github-copilot/claude-sonnet-4.5",
"model": "github-copilot/claude-sonnet-4.6",
},
"visual-engineering": {
"model": "github-copilot/gemini-3-pro-preview",
@ -1406,10 +1406,10 @@ exports[`generateModelConfig mixed provider scenarios uses all providers togethe
"variant": "xhigh",
},
"unspecified-high": {
"model": "anthropic/claude-sonnet-4-5",
"model": "anthropic/claude-sonnet-4-6",
},
"unspecified-low": {
"model": "anthropic/claude-sonnet-4-5",
"model": "anthropic/claude-sonnet-4-6",
},
"visual-engineering": {
"model": "google/gemini-3-pro",
@ -1484,7 +1484,7 @@ exports[`generateModelConfig mixed provider scenarios uses all providers with is
"variant": "max",
},
"unspecified-low": {
"model": "anthropic/claude-sonnet-4-5",
"model": "anthropic/claude-sonnet-4-6",
},
"visual-engineering": {
"model": "google/gemini-3-pro",

View File

@ -180,8 +180,8 @@ describe("config-manager ANTIGRAVITY_PROVIDER_CONFIG", () => {
const required = [
"antigravity-gemini-3-pro",
"antigravity-gemini-3-flash",
"antigravity-claude-sonnet-4-5",
"antigravity-claude-sonnet-4-5-thinking",
"antigravity-claude-sonnet-4-6",
"antigravity-claude-sonnet-4-6-thinking",
"antigravity-claude-opus-4-5-thinking",
]
@ -227,7 +227,7 @@ describe("config-manager ANTIGRAVITY_PROVIDER_CONFIG", () => {
const models = (ANTIGRAVITY_PROVIDER_CONFIG as any).google.models as Record<string, any>
// #when checking Claude thinking variants
const sonnetThinking = models["antigravity-claude-sonnet-4-5-thinking"]
const sonnetThinking = models["antigravity-claude-sonnet-4-6-thinking"]
const opusThinking = models["antigravity-claude-opus-4-5-thinking"]
// #then both should have low and max variants

View File

@ -36,13 +36,13 @@ export const ANTIGRAVITY_PROVIDER_CONFIG = {
high: { thinkingLevel: "high" },
},
},
"antigravity-claude-sonnet-4-5": {
name: "Claude Sonnet 4.5 (Antigravity)",
"antigravity-claude-sonnet-4-6": {
name: "Claude Sonnet 4.6 (Antigravity)",
limit: { context: 200000, output: 64000 },
modalities: { input: ["text", "image", "pdf"], output: ["text"] },
},
"antigravity-claude-sonnet-4-5-thinking": {
name: "Claude Sonnet 4.5 Thinking (Antigravity)",
"antigravity-claude-sonnet-4-6-thinking": {
name: "Claude Sonnet 4.6 Thinking (Antigravity)",
limit: { context: 200000, output: 64000 },
modalities: { input: ["text", "image", "pdf"], output: ["text"] },
variants: {

View File

@ -501,8 +501,8 @@ describe("generateModelConfig", () => {
// #when generateModelConfig is called
const result = generateModelConfig(config)
// #then librarian should use claude-sonnet-4-5 (third in fallback chain after ZAI and opencode/glm)
expect(result.agents?.librarian?.model).toBe("anthropic/claude-sonnet-4-5")
// #then librarian should use claude-sonnet-4-6 (third in fallback chain after ZAI and opencode/glm)
expect(result.agents?.librarian?.model).toBe("anthropic/claude-sonnet-4-6")
})
})

View File

@ -2,7 +2,7 @@ export function transformModelForProvider(provider: string, model: string): stri
if (provider === "github-copilot") {
return model
.replace("claude-opus-4-6", "claude-opus-4.6")
.replace("claude-sonnet-4-5", "claude-sonnet-4.5")
.replace("claude-sonnet-4-6", "claude-sonnet-4.6")
.replace("claude-haiku-4-5", "claude-haiku-4.5")
.replace("claude-sonnet-4", "claude-sonnet-4")
.replace("gemini-3-pro", "gemini-3-pro-preview")

View File

@ -34,7 +34,7 @@ export async function promptInstallConfig(detected: DetectedConfig): Promise<Ins
options: [
{ value: "no", label: "No", hint: "Will use opencode/glm-4.7-free as fallback" },
{ value: "yes", label: "Yes (standard)", hint: "Claude Opus 4.5 for orchestration" },
{ value: "max20", label: "Yes (max20 mode)", hint: "Full power with Claude Sonnet 4.5 for Librarian" },
{ value: "max20", label: "Yes (max20 mode)", hint: "Full power with Claude Sonnet 4.6 for Librarian" },
],
initialValue: initial.claude,
})

View File

@ -6,12 +6,12 @@ describe("ConcurrencyManager.getConcurrencyLimit", () => {
test("should return model-specific limit when modelConcurrency is set", () => {
// given
const config: BackgroundTaskConfig = {
modelConcurrency: { "anthropic/claude-sonnet-4-5": 5 }
modelConcurrency: { "anthropic/claude-sonnet-4-6": 5 }
}
const manager = new ConcurrencyManager(config)
// when
const limit = manager.getConcurrencyLimit("anthropic/claude-sonnet-4-5")
const limit = manager.getConcurrencyLimit("anthropic/claude-sonnet-4-6")
// then
expect(limit).toBe(5)
@ -25,7 +25,7 @@ describe("ConcurrencyManager.getConcurrencyLimit", () => {
const manager = new ConcurrencyManager(config)
// when
const limit = manager.getConcurrencyLimit("anthropic/claude-sonnet-4-5")
const limit = manager.getConcurrencyLimit("anthropic/claude-sonnet-4-6")
// then
expect(limit).toBe(3)
@ -40,7 +40,7 @@ describe("ConcurrencyManager.getConcurrencyLimit", () => {
const manager = new ConcurrencyManager(config)
// when
const limit = manager.getConcurrencyLimit("anthropic/claude-sonnet-4-5")
const limit = manager.getConcurrencyLimit("anthropic/claude-sonnet-4-6")
// then
expect(limit).toBe(3)
@ -54,7 +54,7 @@ describe("ConcurrencyManager.getConcurrencyLimit", () => {
const manager = new ConcurrencyManager(config)
// when
const limit = manager.getConcurrencyLimit("anthropic/claude-sonnet-4-5")
const limit = manager.getConcurrencyLimit("anthropic/claude-sonnet-4-6")
// then
expect(limit).toBe(2)
@ -65,7 +65,7 @@ describe("ConcurrencyManager.getConcurrencyLimit", () => {
const manager = new ConcurrencyManager()
// when
const limit = manager.getConcurrencyLimit("anthropic/claude-sonnet-4-5")
const limit = manager.getConcurrencyLimit("anthropic/claude-sonnet-4-6")
// then
expect(limit).toBe(5)
@ -77,7 +77,7 @@ describe("ConcurrencyManager.getConcurrencyLimit", () => {
const manager = new ConcurrencyManager(config)
// when
const limit = manager.getConcurrencyLimit("anthropic/claude-sonnet-4-5")
const limit = manager.getConcurrencyLimit("anthropic/claude-sonnet-4-6")
// then
expect(limit).toBe(5)
@ -86,14 +86,14 @@ describe("ConcurrencyManager.getConcurrencyLimit", () => {
test("should prioritize model-specific over provider-specific over default", () => {
// given
const config: BackgroundTaskConfig = {
modelConcurrency: { "anthropic/claude-sonnet-4-5": 10 },
modelConcurrency: { "anthropic/claude-sonnet-4-6": 10 },
providerConcurrency: { anthropic: 5 },
defaultConcurrency: 2
}
const manager = new ConcurrencyManager(config)
// when
const modelLimit = manager.getConcurrencyLimit("anthropic/claude-sonnet-4-5")
const modelLimit = manager.getConcurrencyLimit("anthropic/claude-sonnet-4-6")
const providerLimit = manager.getConcurrencyLimit("anthropic/claude-opus-4-6")
const defaultLimit = manager.getConcurrencyLimit("google/gemini-3-pro")
@ -137,7 +137,7 @@ describe("ConcurrencyManager.getConcurrencyLimit", () => {
const manager = new ConcurrencyManager(config)
// when
const limit = manager.getConcurrencyLimit("anthropic/claude-sonnet-4-5")
const limit = manager.getConcurrencyLimit("anthropic/claude-sonnet-4-6")
// then
expect(limit).toBe(Infinity)
@ -146,12 +146,12 @@ describe("ConcurrencyManager.getConcurrencyLimit", () => {
test("should return Infinity when modelConcurrency is 0", () => {
// given
const config: BackgroundTaskConfig = {
modelConcurrency: { "anthropic/claude-sonnet-4-5": 0 }
modelConcurrency: { "anthropic/claude-sonnet-4-6": 0 }
}
const manager = new ConcurrencyManager(config)
// when
const limit = manager.getConcurrencyLimit("anthropic/claude-sonnet-4-5")
const limit = manager.getConcurrencyLimit("anthropic/claude-sonnet-4-6")
// then
expect(limit).toBe(Infinity)
@ -327,16 +327,16 @@ describe("ConcurrencyManager.acquire/release", () => {
test("should use model-specific limit for acquire", async () => {
// given
const config: BackgroundTaskConfig = {
modelConcurrency: { "anthropic/claude-sonnet-4-5": 2 },
modelConcurrency: { "anthropic/claude-sonnet-4-6": 2 },
defaultConcurrency: 5
}
manager = new ConcurrencyManager(config)
await manager.acquire("anthropic/claude-sonnet-4-5")
await manager.acquire("anthropic/claude-sonnet-4-5")
await manager.acquire("anthropic/claude-sonnet-4-6")
await manager.acquire("anthropic/claude-sonnet-4-6")
// when
let resolved = false
const waitPromise = manager.acquire("anthropic/claude-sonnet-4-5").then(() => { resolved = true })
const waitPromise = manager.acquire("anthropic/claude-sonnet-4-6").then(() => { resolved = true })
// Give microtask queue a chance to run
await Promise.resolve()
@ -345,7 +345,7 @@ describe("ConcurrencyManager.acquire/release", () => {
expect(resolved).toBe(false)
// Cleanup
manager.release("anthropic/claude-sonnet-4-5")
manager.release("anthropic/claude-sonnet-4-6")
await waitPromise
})
})

View File

@ -827,7 +827,7 @@ describe("BackgroundManager.notifyParentSession - dynamic message lookup", () =>
{
info: {
agent: "compaction",
model: { providerID: "anthropic", modelID: "claude-sonnet-4-5" },
model: { providerID: "anthropic", modelID: "claude-sonnet-4-6" },
},
},
],

View File

@ -182,7 +182,7 @@ describe("TaskToastManager", () => {
description: "Task with system default model",
agent: "sisyphus-junior",
isBackground: false,
modelInfo: { model: "anthropic/claude-sonnet-4-5", type: "system-default" as const },
modelInfo: { model: "anthropic/claude-sonnet-4-6", type: "system-default" as const },
}
// when - addTask is called
@ -192,7 +192,7 @@ describe("TaskToastManager", () => {
expect(mockClient.tui.showToast).toHaveBeenCalled()
const call = mockClient.tui.showToast.mock.calls[0][0]
expect(call.body.message).toContain("[FALLBACK]")
expect(call.body.message).toContain("anthropic/claude-sonnet-4-5")
expect(call.body.message).toContain("anthropic/claude-sonnet-4-6")
expect(call.body.message).toContain("(system default fallback)")
})

View File

@ -4,11 +4,11 @@ import type { PluginInput } from "@opencode-ai/plugin"
const executeCompactMock = mock(async () => {})
const getLastAssistantMock = mock(async () => ({
providerID: "anthropic",
modelID: "claude-sonnet-4-5",
modelID: "claude-sonnet-4-6",
}))
const parseAnthropicTokenLimitErrorMock = mock(() => ({
providerID: "anthropic",
modelID: "claude-sonnet-4-5",
modelID: "claude-sonnet-4-6",
}))
mock.module("./executor", () => ({

View File

@ -144,10 +144,10 @@ describe("createAnthropicEffortHook", () => {
})
it("should NOT inject effort for non-opus model", async () => {
//#given claude-sonnet-4-5 (not opus)
//#given claude-sonnet-4-6 (not opus)
const hook = createAnthropicEffortHook()
const { input, output } = createMockParams({
modelID: "claude-sonnet-4-5",
modelID: "claude-sonnet-4-6",
})
//#when chat.params hook is called

View File

@ -21,7 +21,7 @@ function createMockInput(sessionID: string, messageID?: string): AutoSlashComman
sessionID,
messageID: messageID ?? `msg-${Date.now()}-${Math.random()}`,
agent: "test-agent",
model: { providerID: "anthropic", modelID: "claude-sonnet-4-5" },
model: { providerID: "anthropic", modelID: "claude-sonnet-4-6" },
}
}
@ -29,7 +29,7 @@ function createMockOutput(text: string): AutoSlashCommandHookOutput {
return {
message: {
agent: "test-agent",
model: { providerID: "anthropic", modelID: "claude-sonnet-4-5" },
model: { providerID: "anthropic", modelID: "claude-sonnet-4-6" },
path: { cwd: "/test", root: "/test" },
tools: {},
},

View File

@ -75,7 +75,7 @@ describe("preemptive-compaction", () => {
role: "assistant",
sessionID,
providerID: "anthropic",
modelID: "claude-sonnet-4-5",
modelID: "claude-sonnet-4-6",
finish: true,
tokens: {
input: 50000,
@ -128,7 +128,7 @@ describe("preemptive-compaction", () => {
role: "assistant",
sessionID,
providerID: "anthropic",
modelID: "claude-sonnet-4-5",
modelID: "claude-sonnet-4-6",
finish: true,
tokens: {
input: 170000,
@ -164,7 +164,7 @@ describe("preemptive-compaction", () => {
role: "assistant",
sessionID,
providerID: "google-vertex-anthropic",
modelID: "claude-sonnet-4-5",
modelID: "claude-sonnet-4-6",
finish: true,
tokens: {
input: 170000,
@ -202,7 +202,7 @@ describe("preemptive-compaction", () => {
role: "assistant",
sessionID,
providerID: "anthropic",
modelID: "claude-sonnet-4-5",
modelID: "claude-sonnet-4-6",
finish: true,
tokens: { input: 180000, output: 0, reasoning: 0, cache: { read: 10000, write: 0 } },
},
@ -241,7 +241,7 @@ describe("preemptive-compaction", () => {
role: "assistant",
sessionID,
providerID: "anthropic",
modelID: "claude-sonnet-4-5",
modelID: "claude-sonnet-4-6",
finish: true,
tokens: {
input: 170000,
@ -282,7 +282,7 @@ describe("preemptive-compaction", () => {
role: "assistant",
sessionID,
providerID: "anthropic",
modelID: "claude-sonnet-4-5",
modelID: "claude-sonnet-4-6",
finish: true,
tokens: {
input: 300000,
@ -321,7 +321,7 @@ describe("preemptive-compaction", () => {
role: "assistant",
sessionID,
providerID: "anthropic",
modelID: "claude-sonnet-4-5",
modelID: "claude-sonnet-4-6",
finish: true,
tokens: {
input: 300000,

View File

@ -83,7 +83,7 @@ describe("createThinkModeHook integration", () => {
const hook = createThinkModeHook()
const input = createMockInput(
"github-copilot",
"claude-sonnet-4-5",
"claude-sonnet-4-6",
"think about this"
)
@ -92,7 +92,7 @@ describe("createThinkModeHook integration", () => {
// then should upgrade to high variant
const message = input.message as MessageWithInjectedProps
expect(input.message.model?.modelID).toBe("claude-sonnet-4-5-high")
expect(input.message.model?.modelID).toBe("claude-sonnet-4-6-high")
expect(message.thinking).toBeDefined()
})
})
@ -201,7 +201,7 @@ describe("createThinkModeHook integration", () => {
const hook = createThinkModeHook()
const input = createMockInput(
"anthropic",
"claude-sonnet-4-5",
"claude-sonnet-4-6",
"think about this"
)
@ -210,7 +210,7 @@ describe("createThinkModeHook integration", () => {
// then should work as before
const message = input.message as MessageWithInjectedProps
expect(input.message.model?.modelID).toBe("claude-sonnet-4-5-high")
expect(input.message.model?.modelID).toBe("claude-sonnet-4-6-high")
expect(message.thinking).toBeDefined()
})
@ -272,7 +272,7 @@ describe("createThinkModeHook integration", () => {
const hook = createThinkModeHook()
const input = createMockInput(
"amazon-bedrock",
"claude-sonnet-4-5",
"claude-sonnet-4-6",
"think"
)
@ -281,7 +281,7 @@ describe("createThinkModeHook integration", () => {
// then should inject bedrock thinking config
const message = input.message as MessageWithInjectedProps
expect(input.message.model?.modelID).toBe("claude-sonnet-4-5-high")
expect(input.message.model?.modelID).toBe("claude-sonnet-4-6-high")
expect(message.reasoningConfig).toBeDefined()
})
})

View File

@ -30,7 +30,7 @@ describe("think-mode switcher", () => {
it("should resolve github-copilot Claude Sonnet to anthropic config", () => {
// given a github-copilot provider with Claude Sonnet model
const config = getThinkingConfig("github-copilot", "claude-sonnet-4-5")
const config = getThinkingConfig("github-copilot", "claude-sonnet-4-6")
// then should return anthropic thinking config
expect(config).not.toBeNull()
@ -295,7 +295,7 @@ describe("think-mode switcher", () => {
it("should still work for amazon-bedrock provider", () => {
// given amazon-bedrock provider with claude model
const config = getThinkingConfig("amazon-bedrock", "claude-sonnet-4-5")
const config = getThinkingConfig("amazon-bedrock", "claude-sonnet-4-6")
// then should return bedrock thinking config
expect(config).not.toBeNull()
@ -364,10 +364,10 @@ describe("think-mode switcher", () => {
describe("getHighVariant with prefixes", () => {
it("should preserve vertex_ai/ prefix when getting high variant", () => {
// given a model ID with vertex_ai/ prefix
const variant = getHighVariant("vertex_ai/claude-sonnet-4-5")
const variant = getHighVariant("vertex_ai/claude-sonnet-4-6")
// then should return high variant with prefix preserved
expect(variant).toBe("vertex_ai/claude-sonnet-4-5-high")
expect(variant).toBe("vertex_ai/claude-sonnet-4-6-high")
})
it("should preserve openai/ prefix when getting high variant", () => {
@ -389,7 +389,7 @@ describe("think-mode switcher", () => {
it("should handle multiple different prefixes", () => {
// given various custom prefixes
expect(getHighVariant("azure/gpt-5")).toBe("azure/gpt-5-high")
expect(getHighVariant("bedrock/claude-sonnet-4-5")).toBe("bedrock/claude-sonnet-4-5-high")
expect(getHighVariant("bedrock/claude-sonnet-4-6")).toBe("bedrock/claude-sonnet-4-6-high")
expect(getHighVariant("custom-llm/gemini-3-pro")).toBe("custom-llm/gemini-3-pro-high")
})
@ -430,7 +430,7 @@ describe("think-mode switcher", () => {
describe("getThinkingConfig with prefixes", () => {
it("should return null for custom providers (not in THINKING_CONFIGS)", () => {
// given custom provider with prefixed Claude model
const config = getThinkingConfig("dia-llm", "vertex_ai/claude-sonnet-4-5")
const config = getThinkingConfig("dia-llm", "vertex_ai/claude-sonnet-4-6")
// then should return null (custom provider not in THINKING_CONFIGS)
expect(config).toBeNull()
@ -459,13 +459,13 @@ describe("think-mode switcher", () => {
it("should handle LLM proxy with vertex_ai prefix correctly", () => {
// given a custom LLM proxy provider using vertex_ai/ prefix
const providerID = "dia-llm"
const modelID = "vertex_ai/claude-sonnet-4-5"
const modelID = "vertex_ai/claude-sonnet-4-6"
// when getting high variant
const highVariant = getHighVariant(modelID)
// then should preserve the prefix
expect(highVariant).toBe("vertex_ai/claude-sonnet-4-5-high")
expect(highVariant).toBe("vertex_ai/claude-sonnet-4-6-high")
// #and when checking if already high
expect(isAlreadyHighVariant(modelID)).toBe(false)

View File

@ -21,8 +21,8 @@
* Custom providers may use prefixes for routing (e.g., vertex_ai/, openai/).
*
* @example
* extractModelPrefix("vertex_ai/claude-sonnet-4-5") // { prefix: "vertex_ai/", base: "claude-sonnet-4-5" }
* extractModelPrefix("claude-sonnet-4-5") // { prefix: "", base: "claude-sonnet-4-5" }
* extractModelPrefix("vertex_ai/claude-sonnet-4-6") // { prefix: "vertex_ai/", base: "claude-sonnet-4-6" }
* extractModelPrefix("claude-sonnet-4-6") // { prefix: "", base: "claude-sonnet-4-6" }
* extractModelPrefix("openai/gpt-5.2") // { prefix: "openai/", base: "gpt-5.2" }
*/
function extractModelPrefix(modelID: string): { prefix: string; base: string } {
@ -87,7 +87,7 @@ function resolveProvider(providerID: string, modelID: string): string {
// For OpenAI models, this signals that reasoning_effort should be set to "high"
const HIGH_VARIANT_MAP: Record<string, string> = {
// Claude
"claude-sonnet-4-5": "claude-sonnet-4-5-high",
"claude-sonnet-4-6": "claude-sonnet-4-6-high",
"claude-opus-4-6": "claude-opus-4-6-high",
// Gemini
"gemini-3-pro": "gemini-3-pro-high",

View File

@ -1374,9 +1374,9 @@ describe("todo-continuation-enforcer", () => {
setMainSession(sessionID)
const mockMessagesWithCompaction = [
{ info: { id: "msg-1", role: "user", agent: "sisyphus", model: { providerID: "anthropic", modelID: "claude-sonnet-4-5" } } },
{ info: { id: "msg-2", role: "assistant", agent: "sisyphus", modelID: "claude-sonnet-4-5", providerID: "anthropic" } },
{ info: { id: "msg-3", role: "assistant", agent: "compaction", modelID: "claude-sonnet-4-5", providerID: "anthropic" } },
{ info: { id: "msg-1", role: "user", agent: "sisyphus", model: { providerID: "anthropic", modelID: "claude-sonnet-4-6" } } },
{ info: { id: "msg-2", role: "assistant", agent: "sisyphus", modelID: "claude-sonnet-4-6", providerID: "anthropic" } },
{ info: { id: "msg-3", role: "assistant", agent: "compaction", modelID: "claude-sonnet-4-6", providerID: "anthropic" } },
]
const mockInput = {

View File

@ -11,8 +11,8 @@ describe("Agent Config Integration", () => {
Sisyphus: { model: "anthropic/claude-opus-4-6" },
Atlas: { model: "anthropic/claude-opus-4-6" },
"Prometheus (Planner)": { model: "anthropic/claude-opus-4-6" },
"Metis (Plan Consultant)": { model: "anthropic/claude-sonnet-4-5" },
"Momus (Plan Reviewer)": { model: "anthropic/claude-sonnet-4-5" },
"Metis (Plan Consultant)": { model: "anthropic/claude-sonnet-4-6" },
"Momus (Plan Reviewer)": { model: "anthropic/claude-sonnet-4-6" },
}
// when - migration is applied

View File

@ -47,7 +47,7 @@ describe("updateConnectedProvidersCache", () => {
env: [],
models: {
"claude-opus-4-6": { id: "claude-opus-4-6", name: "Claude Opus 4.6" },
"claude-sonnet-4-5": { id: "claude-sonnet-4-5", name: "Claude Sonnet 4.5" },
"claude-sonnet-4-6": { id: "claude-sonnet-4-6", name: "Claude Sonnet 4.6" },
},
},
],
@ -65,7 +65,7 @@ describe("updateConnectedProvidersCache", () => {
expect(cache!.connected).toEqual(["openai", "anthropic"])
expect(cache!.models).toEqual({
openai: ["gpt-5.3-codex", "gpt-5.2"],
anthropic: ["claude-opus-4-6", "claude-sonnet-4-5"],
anthropic: ["claude-opus-4-6", "claude-sonnet-4-6"],
})
})

View File

@ -782,7 +782,7 @@ describe("migrateAgentConfigToCategory", () => {
{ model: "openai/gpt-5.2" },
{ model: "anthropic/claude-haiku-4-5" },
{ model: "anthropic/claude-opus-4-6" },
{ model: "anthropic/claude-sonnet-4-5" },
{ model: "anthropic/claude-sonnet-4-6" },
]
const expectedCategories = ["visual-engineering", "writing", "ultrabrain", "quick", "unspecified-high", "unspecified-low"]

View File

@ -17,7 +17,7 @@ export const MODEL_TO_CATEGORY_MAP: Record<string, string> = {
"openai/gpt-5.2": "ultrabrain",
"anthropic/claude-haiku-4-5": "quick",
"anthropic/claude-opus-4-6": "unspecified-high",
"anthropic/claude-sonnet-4-5": "unspecified-low",
"anthropic/claude-sonnet-4-6": "unspecified-low",
}
export function migrateAgentConfigToCategory(config: Record<string, unknown>): {

View File

@ -8,6 +8,7 @@
export const MODEL_VERSION_MAP: Record<string, string> = {
"openai/gpt-5.2-codex": "openai/gpt-5.3-codex",
"anthropic/claude-opus-4-5": "anthropic/claude-opus-4-6",
"anthropic/claude-sonnet-4-5": "anthropic/claude-sonnet-4-6",
}
function migrationKey(oldModel: string, newModel: string): string {

View File

@ -164,7 +164,7 @@ describe("fetchAvailableModels", () => {
it("#given cache file with various providers #when fetchAvailableModels called with all providers #then extracts all IDs correctly", async () => {
writeModelsCache({
openai: { id: "openai", models: { "gpt-5.3-codex": { id: "gpt-5.3-codex" } } },
anthropic: { id: "anthropic", models: { "claude-sonnet-4-5": { id: "claude-sonnet-4-5" } } },
anthropic: { id: "anthropic", models: { "claude-sonnet-4-6": { id: "claude-sonnet-4-6" } } },
google: { id: "google", models: { "gemini-3-flash": { id: "gemini-3-flash" } } },
opencode: { id: "opencode", models: { "gpt-5-nano": { id: "gpt-5-nano" } } },
})
@ -175,7 +175,7 @@ describe("fetchAvailableModels", () => {
expect(result.size).toBe(4)
expect(result.has("openai/gpt-5.3-codex")).toBe(true)
expect(result.has("anthropic/claude-sonnet-4-5")).toBe(true)
expect(result.has("anthropic/claude-sonnet-4-6")).toBe(true)
expect(result.has("google/gemini-3-flash")).toBe(true)
expect(result.has("opencode/gpt-5-nano")).toBe(true)
})
@ -239,7 +239,7 @@ describe("fuzzyMatchModel", () => {
it("should match claude-opus to claude-opus-4-6", () => {
const available = new Set([
"anthropic/claude-opus-4-6",
"anthropic/claude-sonnet-4-5",
"anthropic/claude-sonnet-4-6",
])
const result = fuzzyMatchModel("claude-opus", available)
expect(result).toBe("anthropic/claude-opus-4-6")
@ -697,7 +697,7 @@ describe("fetchAvailableModels with provider-models cache (whitelist-filtered)",
})
writeModelsCache({
opencode: { models: { "glm-4.7-free": {}, "gpt-5-nano": {}, "gpt-5.2": {} } },
anthropic: { models: { "claude-opus-4-6": {}, "claude-sonnet-4-5": {} } }
anthropic: { models: { "claude-opus-4-6": {}, "claude-sonnet-4-6": {} } }
})
const result = await fetchAvailableModels(undefined, {
@ -709,7 +709,7 @@ describe("fetchAvailableModels with provider-models cache (whitelist-filtered)",
expect(result.has("opencode/gpt-5-nano")).toBe(true)
expect(result.has("anthropic/claude-opus-4-6")).toBe(true)
expect(result.has("opencode/gpt-5.2")).toBe(false)
expect(result.has("anthropic/claude-sonnet-4-5")).toBe(false)
expect(result.has("anthropic/claude-sonnet-4-6")).toBe(false)
})
// given provider-models cache exists but has no models (API failure)
@ -797,7 +797,7 @@ describe("fetchAvailableModels with provider-models cache (whitelist-filtered)",
it("should handle mixed string[] and object[] formats across providers", async () => {
writeProviderModelsCache({
models: {
anthropic: ["claude-opus-4-6", "claude-sonnet-4-5"],
anthropic: ["claude-opus-4-6", "claude-sonnet-4-6"],
ollama: [
{ id: "ministral-3:14b-32k-agent", provider: "ollama" },
{ id: "qwen3-coder:32k-agent", provider: "ollama" }
@ -812,7 +812,7 @@ describe("fetchAvailableModels with provider-models cache (whitelist-filtered)",
expect(result.size).toBe(4)
expect(result.has("anthropic/claude-opus-4-6")).toBe(true)
expect(result.has("anthropic/claude-sonnet-4-5")).toBe(true)
expect(result.has("anthropic/claude-sonnet-4-6")).toBe(true)
expect(result.has("ollama/ministral-3:14b-32k-agent")).toBe(true)
expect(result.has("ollama/qwen3-coder:32k-agent")).toBe(true)
})

View File

@ -284,18 +284,18 @@ describe("CATEGORY_MODEL_REQUIREMENTS", () => {
expect(primary.providers[0]).toBe("anthropic")
})
test("unspecified-low has valid fallbackChain with claude-sonnet-4-5 as primary", () => {
test("unspecified-low has valid fallbackChain with claude-sonnet-4-6 as primary", () => {
// given - unspecified-low category requirement
const unspecifiedLow = CATEGORY_MODEL_REQUIREMENTS["unspecified-low"]
// when - accessing unspecified-low requirement
// then - fallbackChain exists with claude-sonnet-4-5 as first entry
// then - fallbackChain exists with claude-sonnet-4-6 as first entry
expect(unspecifiedLow).toBeDefined()
expect(unspecifiedLow.fallbackChain).toBeArray()
expect(unspecifiedLow.fallbackChain.length).toBeGreaterThan(0)
const primary = unspecifiedLow.fallbackChain[0]
expect(primary.model).toBe("claude-sonnet-4-5")
expect(primary.model).toBe("claude-sonnet-4-6")
expect(primary.providers[0]).toBe("anthropic")
})
@ -336,7 +336,7 @@ describe("CATEGORY_MODEL_REQUIREMENTS", () => {
const writing = CATEGORY_MODEL_REQUIREMENTS["writing"]
// when - accessing writing requirement
// then - fallbackChain: k2p5 → gemini-3-flash → claude-sonnet-4-5
// then - fallbackChain: k2p5 → gemini-3-flash → claude-sonnet-4-6
expect(writing).toBeDefined()
expect(writing.fallbackChain).toBeArray()
expect(writing.fallbackChain).toHaveLength(3)

View File

@ -40,7 +40,7 @@ export const AGENT_MODEL_REQUIREMENTS: Record<string, ModelRequirement> = {
fallbackChain: [
{ providers: ["zai-coding-plan"], model: "glm-4.7" },
{ providers: ["opencode"], model: "glm-4.7-free" },
{ providers: ["anthropic", "github-copilot", "opencode"], model: "claude-sonnet-4-5" },
{ providers: ["anthropic", "github-copilot", "opencode"], model: "claude-sonnet-4-6" },
],
},
explore: {
@ -90,7 +90,7 @@ export const AGENT_MODEL_REQUIREMENTS: Record<string, ModelRequirement> = {
fallbackChain: [
{ providers: ["kimi-for-coding"], model: "k2p5" },
{ providers: ["opencode"], model: "kimi-k2.5-free" },
{ providers: ["anthropic", "github-copilot", "opencode"], model: "claude-sonnet-4-5" },
{ providers: ["anthropic", "github-copilot", "opencode"], model: "claude-sonnet-4-6" },
{ providers: ["openai", "github-copilot", "opencode"], model: "gpt-5.2" },
{ providers: ["google", "github-copilot", "opencode"], model: "gemini-3-pro" },
],
@ -138,7 +138,7 @@ export const CATEGORY_MODEL_REQUIREMENTS: Record<string, ModelRequirement> = {
},
"unspecified-low": {
fallbackChain: [
{ providers: ["anthropic", "github-copilot", "opencode"], model: "claude-sonnet-4-5" },
{ providers: ["anthropic", "github-copilot", "opencode"], model: "claude-sonnet-4-6" },
{ providers: ["openai", "github-copilot", "opencode"], model: "gpt-5.3-codex", variant: "medium" },
{ providers: ["google", "github-copilot", "opencode"], model: "gemini-3-flash" },
],
@ -154,7 +154,7 @@ export const CATEGORY_MODEL_REQUIREMENTS: Record<string, ModelRequirement> = {
fallbackChain: [
{ providers: ["kimi-for-coding"], model: "k2p5" },
{ providers: ["google", "github-copilot", "opencode"], model: "gemini-3-flash" },
{ providers: ["anthropic", "github-copilot", "opencode"], model: "claude-sonnet-4-5" },
{ providers: ["anthropic", "github-copilot", "opencode"], model: "claude-sonnet-4-6" },
],
},
}

View File

@ -394,9 +394,9 @@ describe("resolveModelWithFallback", () => {
const input: ExtendedModelResolutionInput = {
fallbackChain: [
{ providers: ["zai-coding-plan"], model: "glm-4.7" },
{ providers: ["anthropic"], model: "claude-sonnet-4-5" },
{ providers: ["anthropic"], model: "claude-sonnet-4-6" },
],
availableModels: new Set(["opencode/glm-4.7", "anthropic/claude-sonnet-4-5"]),
availableModels: new Set(["opencode/glm-4.7", "anthropic/claude-sonnet-4-6"]),
systemDefaultModel: "google/gemini-3-pro",
}
@ -454,9 +454,9 @@ describe("resolveModelWithFallback", () => {
const input: ExtendedModelResolutionInput = {
fallbackChain: [
{ providers: ["zai-coding-plan"], model: "nonexistent-model" },
{ providers: ["anthropic"], model: "claude-sonnet-4-5" },
{ providers: ["anthropic"], model: "claude-sonnet-4-6" },
],
availableModels: new Set(["anthropic/claude-sonnet-4-5"]),
availableModels: new Set(["anthropic/claude-sonnet-4-6"]),
systemDefaultModel: "google/gemini-3-pro",
}
@ -464,7 +464,7 @@ describe("resolveModelWithFallback", () => {
const result = resolveModelWithFallback(input)
// then - should fall through to second entry
expect(result!.model).toBe("anthropic/claude-sonnet-4-5")
expect(result!.model).toBe("anthropic/claude-sonnet-4-6")
expect(result!.source).toBe("provider-fallback")
})
})
@ -536,7 +536,7 @@ describe("resolveModelWithFallback", () => {
{ providers: ["google", "github-copilot", "opencode"], model: "gemini-3-pro" },
],
availableModels: new Set(),
systemDefaultModel: "anthropic/claude-sonnet-4-5",
systemDefaultModel: "anthropic/claude-sonnet-4-6",
}
// when
@ -714,7 +714,7 @@ describe("resolveModelWithFallback", () => {
{ providers: ["google", "github-copilot", "opencode"], model: "gemini-3-pro" },
],
availableModels: new Set(["google/gemini-3-pro-preview", "anthropic/claude-opus-4-6"]),
systemDefaultModel: "anthropic/claude-sonnet-4-5",
systemDefaultModel: "anthropic/claude-sonnet-4-6",
}
// when
@ -733,7 +733,7 @@ describe("resolveModelWithFallback", () => {
{ providers: ["google"], model: "gemini-3-pro" },
],
availableModels: new Set(["google/gemini-3-pro", "google/gemini-3-pro-preview"]),
systemDefaultModel: "anthropic/claude-sonnet-4-5",
systemDefaultModel: "anthropic/claude-sonnet-4-6",
}
// when
@ -789,7 +789,7 @@ describe("resolveModelWithFallback", () => {
const input: ExtendedModelResolutionInput = {
categoryDefaultModel: "google/gemini-3-pro",
availableModels: new Set(),
systemDefaultModel: "anthropic/claude-sonnet-4-5",
systemDefaultModel: "anthropic/claude-sonnet-4-6",
}
// when

View File

@ -10,7 +10,7 @@ describe("parseModelSuggestion", () => {
data: {
providerID: "anthropic",
modelID: "claude-sonet-4",
suggestions: ["claude-sonnet-4", "claude-sonnet-4-5"],
suggestions: ["claude-sonnet-4", "claude-sonnet-4-6"],
},
}
@ -115,7 +115,7 @@ describe("parseModelSuggestion", () => {
it("should parse suggestion from error message string", () => {
// given an Error with model-not-found message and suggestion
const error = new Error(
"Model not found: anthropic/claude-sonet-4. Did you mean: claude-sonnet-4, claude-sonnet-4-5?"
"Model not found: anthropic/claude-sonet-4. Did you mean: claude-sonnet-4, claude-sonnet-4-6?"
)
// when parsing the error

View File

@ -97,7 +97,7 @@
| deep | gpt-5.3-codex medium | Autonomous problem-solving |
| artistry | gemini-3-pro high | Creative approaches |
| quick | claude-haiku-4-5 | Trivial tasks |
| unspecified-low | claude-sonnet-4-5 | Moderate effort |
| unspecified-low | claude-sonnet-4-6 | Moderate effort |
| unspecified-high | claude-opus-4-6 max | High effort |
| writing | kimi-k2p5 | Documentation |

View File

@ -40,7 +40,7 @@ describe("resolveCategoryExecution", () => {
}
const executorCtx = createMockExecutorContext()
const inheritedModel = undefined
const systemDefaultModel = "anthropic/claude-sonnet-4-5"
const systemDefaultModel = "anthropic/claude-sonnet-4-6"
//#when
const result = await resolveCategoryExecution(args, executorCtx, inheritedModel, systemDefaultModel)
@ -65,7 +65,7 @@ describe("resolveCategoryExecution", () => {
}
const executorCtx = createMockExecutorContext()
const inheritedModel = undefined
const systemDefaultModel = "anthropic/claude-sonnet-4-5"
const systemDefaultModel = "anthropic/claude-sonnet-4-6"
//#when
const result = await resolveCategoryExecution(args, executorCtx, inheritedModel, systemDefaultModel)

View File

@ -112,7 +112,7 @@ Available categories: ${allCategoryNames}`,
modelInfo: undefined,
actualModel: undefined,
isUnstableAgent: false,
error: `Invalid model format "${actualModel}". Expected "provider/model" format (e.g., "anthropic/claude-sonnet-4-5").`,
error: `Invalid model format "${actualModel}". Expected "provider/model" format (e.g., "anthropic/claude-sonnet-4-6").`,
}
}

View File

@ -125,7 +125,7 @@ This is NOT a default choice - it's for genuinely unclassifiable moderate-effort
</Category_Context>
<Caller_Warning>
THIS CATEGORY USES A MID-TIER MODEL (claude-sonnet-4-5).
THIS CATEGORY USES A MID-TIER MODEL (claude-sonnet-4-6).
**PROVIDE CLEAR STRUCTURE:**
1. MUST DO: Enumerate required actions explicitly
@ -213,7 +213,7 @@ export const DEFAULT_CATEGORIES: Record<string, CategoryConfig> = {
deep: { model: "openai/gpt-5.3-codex", variant: "medium" },
artistry: { model: "google/gemini-3-pro", variant: "high" },
quick: { model: "anthropic/claude-haiku-4-5" },
"unspecified-low": { model: "anthropic/claude-sonnet-4-5" },
"unspecified-low": { model: "anthropic/claude-sonnet-4-6" },
"unspecified-high": { model: "anthropic/claude-opus-4-6", variant: "max" },
writing: { model: "kimi-for-coding/k2p5" },
}

View File

@ -10,12 +10,12 @@ import { __setTimingConfig, __resetTimingConfig } from "./timing"
import * as connectedProvidersCache from "../../shared/connected-providers-cache"
import * as executor from "./executor"
const SYSTEM_DEFAULT_MODEL = "anthropic/claude-sonnet-4-5"
const SYSTEM_DEFAULT_MODEL = "anthropic/claude-sonnet-4-6"
const TEST_CONNECTED_PROVIDERS = ["anthropic", "google", "openai"]
const TEST_AVAILABLE_MODELS = new Set([
"anthropic/claude-opus-4-6",
"anthropic/claude-sonnet-4-5",
"anthropic/claude-sonnet-4-6",
"anthropic/claude-haiku-4-5",
"google/gemini-3-pro",
"google/gemini-3-flash",
@ -51,7 +51,7 @@ describe("sisyphus-task", () => {
cacheSpy = spyOn(connectedProvidersCache, "readConnectedProvidersCache").mockReturnValue(["anthropic", "google", "openai"])
providerModelsSpy = spyOn(connectedProvidersCache, "readProviderModelsCache").mockReturnValue({
models: {
anthropic: ["claude-opus-4-6", "claude-sonnet-4-5", "claude-haiku-4-5"],
anthropic: ["claude-opus-4-6", "claude-sonnet-4-6", "claude-haiku-4-5"],
google: ["gemini-3-pro", "gemini-3-flash"],
openai: ["gpt-5.2", "gpt-5.3-codex"],
},
@ -2251,7 +2251,7 @@ describe("sisyphus-task", () => {
)
// then - model should be anthropic/claude-haiku-4-5 from DEFAULT_CATEGORIES
// NOT anthropic/claude-sonnet-4-5 (system default)
// NOT anthropic/claude-sonnet-4-6 (system default)
expect(launchInput.model.providerID).toBe("anthropic")
expect(launchInput.model.modelID).toBe("claude-haiku-4-5")
})
@ -2352,7 +2352,7 @@ describe("sisyphus-task", () => {
const tool = createDelegateTask({
manager: mockManager,
client: mockClient,
sisyphusJuniorModel: "anthropic/claude-sonnet-4-5",
sisyphusJuniorModel: "anthropic/claude-sonnet-4-6",
connectedProvidersOverride: TEST_CONNECTED_PROVIDERS,
availableModelsOverride: createTestAvailableModels(),
})
@ -2378,7 +2378,7 @@ describe("sisyphus-task", () => {
// then - override model should be used instead of category model
expect(launchInput.model.providerID).toBe("anthropic")
expect(launchInput.model.modelID).toBe("claude-sonnet-4-5")
expect(launchInput.model.modelID).toBe("claude-sonnet-4-6")
})
test("explicit category model takes precedence over sisyphus-junior model", async () => {
@ -2414,7 +2414,7 @@ describe("sisyphus-task", () => {
const tool = createDelegateTask({
manager: mockManager,
client: mockClient,
sisyphusJuniorModel: "anthropic/claude-sonnet-4-5",
sisyphusJuniorModel: "anthropic/claude-sonnet-4-6",
userCategories: {
ultrabrain: { model: "openai/gpt-5.3-codex" },
},
@ -2478,7 +2478,7 @@ describe("sisyphus-task", () => {
const tool = createDelegateTask({
manager: mockManager,
client: mockClient,
sisyphusJuniorModel: "anthropic/claude-sonnet-4-5",
sisyphusJuniorModel: "anthropic/claude-sonnet-4-6",
connectedProvidersOverride: TEST_CONNECTED_PROVIDERS,
availableModelsOverride: createTestAvailableModels(),
})
@ -2504,7 +2504,7 @@ describe("sisyphus-task", () => {
// then - sisyphus-junior override model should be used, not category default
expect(launchInput.model.providerID).toBe("anthropic")
expect(launchInput.model.modelID).toBe("claude-sonnet-4-5")
expect(launchInput.model.modelID).toBe("claude-sonnet-4-6")
})
test("sisyphus-junior model override works with user-defined category (#1295)", async () => {
@ -2886,7 +2886,7 @@ describe("sisyphus-task", () => {
// then - default model from DEFAULT_CATEGORIES is used
expect(resolved).not.toBeNull()
expect(resolved!.config.model).toBe("anthropic/claude-sonnet-4-5")
expect(resolved!.config.model).toBe("anthropic/claude-sonnet-4-6")
})
test("category built-in model takes precedence over inheritedModel for builtin category", () => {
@ -2966,7 +2966,7 @@ describe("sisyphus-task", () => {
// given a custom category with no default model
const categoryName = "custom-no-default"
const userCategories = { "custom-no-default": { temperature: 0.5 } } as unknown as Record<string, CategoryConfig>
const systemDefaultModel = "anthropic/claude-sonnet-4-5"
const systemDefaultModel = "anthropic/claude-sonnet-4-6"
// when no inheritedModel is provided, only systemDefaultModel
const resolved = resolveCategoryConfig(categoryName, {
@ -2976,7 +2976,7 @@ describe("sisyphus-task", () => {
// then systemDefaultModel should be returned
expect(resolved).not.toBeNull()
expect(resolved!.model).toBe("anthropic/claude-sonnet-4-5")
expect(resolved!.model).toBe("anthropic/claude-sonnet-4-6")
})
test("FIXED: userConfig.model always takes priority over everything", () => {
@ -2984,7 +2984,7 @@ describe("sisyphus-task", () => {
const categoryName = "ultrabrain"
const userCategories = { "ultrabrain": { model: "custom/user-model" } }
const inheritedModel = "anthropic/claude-opus-4-6"
const systemDefaultModel = "anthropic/claude-sonnet-4-5"
const systemDefaultModel = "anthropic/claude-sonnet-4-6"
// when resolveCategoryConfig is called with all sources
const resolved = resolveCategoryConfig(categoryName, {
@ -3032,7 +3032,7 @@ describe("sisyphus-task", () => {
const categoryName = "my-custom"
// Using type assertion since we're testing fallback behavior for categories without model
const userCategories = { "my-custom": { temperature: 0.5 } } as unknown as Record<string, CategoryConfig>
const systemDefaultModel = "anthropic/claude-sonnet-4-5"
const systemDefaultModel = "anthropic/claude-sonnet-4-6"
// when
const resolved = resolveCategoryConfig(categoryName, { userCategories, systemDefaultModel })
@ -3801,7 +3801,7 @@ describe("sisyphus-task", () => {
manager: mockManager,
client: mockClient,
userCategories: {
"sisyphus-junior": { model: "anthropic/claude-sonnet-4-5" },
"sisyphus-junior": { model: "anthropic/claude-sonnet-4-6" },
},
})