From 54b4844d3febbe8a7b94808185602632a0ef995b Mon Sep 17 00:00:00 2001 From: justsisyphus Date: Thu, 22 Jan 2026 22:43:27 +0900 Subject: [PATCH] refactor(shared): improve model resolver with 3-step resolution Implement resolveModelWithFallback() that tries: 1) user override, 2) fuzzy match from requirements chain against available models, 3) system default. Export new model utilities from shared index. Ultraworked with [Sisyphus](https://github.com/code-yeongyu/oh-my-opencode) Co-authored-by: Sisyphus --- src/shared/index.ts | 2 + src/shared/model-resolver.test.ts | 432 +++++++++++++++++++++++++++--- src/shared/model-resolver.ts | 100 +++++-- 3 files changed, 475 insertions(+), 59 deletions(-) diff --git a/src/shared/index.ts b/src/shared/index.ts index fef890e3..ec775ef3 100644 --- a/src/shared/index.ts +++ b/src/shared/index.ts @@ -26,4 +26,6 @@ export * from "./session-cursor" export * from "./shell-env" export * from "./system-directive" export * from "./agent-tool-restrictions" +export * from "./model-requirements" export * from "./model-resolver" +export * from "./model-availability" diff --git a/src/shared/model-resolver.test.ts b/src/shared/model-resolver.test.ts index d984be29..70ee4605 100644 --- a/src/shared/model-resolver.test.ts +++ b/src/shared/model-resolver.test.ts @@ -1,5 +1,6 @@ -import { describe, expect, test } from "bun:test"; -import { resolveModel, type ModelResolutionInput } from "./model-resolver"; +import { describe, expect, test, spyOn, beforeEach, afterEach } from "bun:test" +import { resolveModel, resolveModelWithFallback, type ModelResolutionInput, type ExtendedModelResolutionInput, type ModelResolutionResult, type ModelSource } from "./model-resolver" +import * as logger from "./logger" describe("resolveModel", () => { describe("priority chain", () => { @@ -9,14 +10,14 @@ describe("resolveModel", () => { userModel: "anthropic/claude-opus-4-5", inheritedModel: "openai/gpt-5.2", systemDefault: "google/gemini-3-pro", - }; + } // #when - 
const result = resolveModel(input); + const result = resolveModel(input) // #then - expect(result).toBe("anthropic/claude-opus-4-5"); - }); + expect(result).toBe("anthropic/claude-opus-4-5") + }) test("returns inheritedModel when userModel is undefined", () => { // #given @@ -24,14 +25,14 @@ describe("resolveModel", () => { userModel: undefined, inheritedModel: "openai/gpt-5.2", systemDefault: "google/gemini-3-pro", - }; + } // #when - const result = resolveModel(input); + const result = resolveModel(input) // #then - expect(result).toBe("openai/gpt-5.2"); - }); + expect(result).toBe("openai/gpt-5.2") + }) test("returns systemDefault when both userModel and inheritedModel are undefined", () => { // #given @@ -39,15 +40,15 @@ describe("resolveModel", () => { userModel: undefined, inheritedModel: undefined, systemDefault: "google/gemini-3-pro", - }; + } // #when - const result = resolveModel(input); + const result = resolveModel(input) // #then - expect(result).toBe("google/gemini-3-pro"); - }); - }); + expect(result).toBe("google/gemini-3-pro") + }) + }) describe("empty string handling", () => { test("treats empty string as unset, uses fallback", () => { @@ -56,14 +57,14 @@ describe("resolveModel", () => { userModel: "", inheritedModel: "openai/gpt-5.2", systemDefault: "google/gemini-3-pro", - }; + } // #when - const result = resolveModel(input); + const result = resolveModel(input) // #then - expect(result).toBe("openai/gpt-5.2"); - }); + expect(result).toBe("openai/gpt-5.2") + }) test("treats whitespace-only string as unset, uses fallback", () => { // #given @@ -71,15 +72,15 @@ describe("resolveModel", () => { userModel: " ", inheritedModel: "", systemDefault: "google/gemini-3-pro", - }; + } // #when - const result = resolveModel(input); + const result = resolveModel(input) // #then - expect(result).toBe("google/gemini-3-pro"); - }); - }); + expect(result).toBe("google/gemini-3-pro") + }) + }) describe("purity", () => { test("same input returns same output 
(referential transparency)", () => { @@ -88,14 +89,383 @@ describe("resolveModel", () => { userModel: "anthropic/claude-opus-4-5", inheritedModel: "openai/gpt-5.2", systemDefault: "google/gemini-3-pro", - }; + } // #when - const result1 = resolveModel(input); - const result2 = resolveModel(input); + const result1 = resolveModel(input) + const result2 = resolveModel(input) // #then - expect(result1).toBe(result2); - }); - }); -}); + expect(result1).toBe(result2) + }) + }) +}) + +describe("resolveModelWithFallback", () => { + let logSpy: ReturnType + + beforeEach(() => { + logSpy = spyOn(logger, "log") + }) + + afterEach(() => { + logSpy.mockRestore() + }) + + describe("Step 1: Override", () => { + test("returns userModel with override source when userModel is provided", () => { + // #given + const input: ExtendedModelResolutionInput = { + userModel: "anthropic/claude-opus-4-5", + fallbackChain: [ + { providers: ["anthropic", "github-copilot"], model: "claude-opus-4-5" }, + ], + availableModels: new Set(["anthropic/claude-opus-4-5", "github-copilot/claude-opus-4-5-preview"]), + systemDefaultModel: "google/gemini-3-pro", + } + + // #when + const result = resolveModelWithFallback(input) + + // #then + expect(result.model).toBe("anthropic/claude-opus-4-5") + expect(result.source).toBe("override") + expect(logSpy).toHaveBeenCalledWith("Model resolved via override", { model: "anthropic/claude-opus-4-5" }) + }) + + test("override takes priority even if model not in availableModels", () => { + // #given + const input: ExtendedModelResolutionInput = { + userModel: "custom/my-model", + fallbackChain: [ + { providers: ["anthropic"], model: "claude-opus-4-5" }, + ], + availableModels: new Set(["anthropic/claude-opus-4-5"]), + systemDefaultModel: "google/gemini-3-pro", + } + + // #when + const result = resolveModelWithFallback(input) + + // #then + expect(result.model).toBe("custom/my-model") + expect(result.source).toBe("override") + }) + + test("whitespace-only userModel is 
treated as not provided", () => { + // #given + const input: ExtendedModelResolutionInput = { + userModel: " ", + fallbackChain: [ + { providers: ["anthropic"], model: "claude-opus-4-5" }, + ], + availableModels: new Set(["anthropic/claude-opus-4-5"]), + systemDefaultModel: "google/gemini-3-pro", + } + + // #when + const result = resolveModelWithFallback(input) + + // #then + expect(result.source).not.toBe("override") + }) + + test("empty string userModel is treated as not provided", () => { + // #given + const input: ExtendedModelResolutionInput = { + userModel: "", + fallbackChain: [ + { providers: ["anthropic"], model: "claude-opus-4-5" }, + ], + availableModels: new Set(["anthropic/claude-opus-4-5"]), + systemDefaultModel: "google/gemini-3-pro", + } + + // #when + const result = resolveModelWithFallback(input) + + // #then + expect(result.source).not.toBe("override") + }) + }) + + describe("Step 2: Provider fallback chain", () => { + test("tries providers in order within entry and returns first match", () => { + // #given + const input: ExtendedModelResolutionInput = { + fallbackChain: [ + { providers: ["anthropic", "github-copilot", "opencode"], model: "claude-opus-4-5" }, + ], + availableModels: new Set(["github-copilot/claude-opus-4-5-preview", "opencode/claude-opus-4-7"]), + systemDefaultModel: "google/gemini-3-pro", + } + + // #when + const result = resolveModelWithFallback(input) + + // #then + expect(result.model).toBe("github-copilot/claude-opus-4-5-preview") + expect(result.source).toBe("provider-fallback") + expect(logSpy).toHaveBeenCalledWith("Model resolved via fallback chain", { + provider: "github-copilot", + model: "claude-opus-4-5", + match: "github-copilot/claude-opus-4-5-preview", + }) + }) + + test("respects provider priority order within entry", () => { + // #given + const input: ExtendedModelResolutionInput = { + fallbackChain: [ + { providers: ["openai", "anthropic", "google"], model: "gpt-5.2" }, + ], + availableModels: new 
Set(["openai/gpt-5.2", "anthropic/claude-opus-4-5", "google/gemini-3-pro"]), + systemDefaultModel: "google/gemini-3-pro", + } + + // #when + const result = resolveModelWithFallback(input) + + // #then + expect(result.model).toBe("openai/gpt-5.2") + expect(result.source).toBe("provider-fallback") + }) + + test("tries next provider when first provider has no match", () => { + // #given + const input: ExtendedModelResolutionInput = { + fallbackChain: [ + { providers: ["anthropic", "opencode", "github-copilot"], model: "grok-code" }, + ], + availableModels: new Set(["opencode/grok-code", "github-copilot/grok-code-preview"]), + systemDefaultModel: "google/gemini-3-pro", + } + + // #when + const result = resolveModelWithFallback(input) + + // #then + expect(result.model).toBe("opencode/grok-code") + expect(result.source).toBe("provider-fallback") + }) + + test("uses fuzzy matching within provider", () => { + // #given + const input: ExtendedModelResolutionInput = { + fallbackChain: [ + { providers: ["anthropic", "github-copilot"], model: "claude-opus" }, + ], + availableModels: new Set(["anthropic/claude-opus-4-5", "github-copilot/claude-opus-4-5-preview"]), + systemDefaultModel: "google/gemini-3-pro", + } + + // #when + const result = resolveModelWithFallback(input) + + // #then + expect(result.model).toBe("anthropic/claude-opus-4-5") + expect(result.source).toBe("provider-fallback") + }) + + test("skips fallback chain when not provided", () => { + // #given + const input: ExtendedModelResolutionInput = { + availableModels: new Set(["anthropic/claude-opus-4-5"]), + systemDefaultModel: "google/gemini-3-pro", + } + + // #when + const result = resolveModelWithFallback(input) + + // #then + expect(result.source).toBe("system-default") + }) + + test("skips fallback chain when empty", () => { + // #given + const input: ExtendedModelResolutionInput = { + fallbackChain: [], + availableModels: new Set(["anthropic/claude-opus-4-5"]), + systemDefaultModel: "google/gemini-3-pro", + 
} + + // #when + const result = resolveModelWithFallback(input) + + // #then + expect(result.source).toBe("system-default") + }) + + test("case-insensitive fuzzy matching", () => { + // #given + const input: ExtendedModelResolutionInput = { + fallbackChain: [ + { providers: ["anthropic"], model: "CLAUDE-OPUS" }, + ], + availableModels: new Set(["anthropic/claude-opus-4-5"]), + systemDefaultModel: "google/gemini-3-pro", + } + + // #when + const result = resolveModelWithFallback(input) + + // #then + expect(result.model).toBe("anthropic/claude-opus-4-5") + expect(result.source).toBe("provider-fallback") + }) + }) + + describe("Step 3: System default", () => { + test("returns systemDefaultModel with system-default source when nothing matches", () => { + // #given + const input: ExtendedModelResolutionInput = { + fallbackChain: [ + { providers: ["anthropic"], model: "nonexistent-model" }, + ], + availableModels: new Set(["openai/gpt-5.2", "anthropic/claude-opus-4-5"]), + systemDefaultModel: "google/gemini-3-pro", + } + + // #when + const result = resolveModelWithFallback(input) + + // #then + expect(result.model).toBe("google/gemini-3-pro") + expect(result.source).toBe("system-default") + expect(logSpy).toHaveBeenCalledWith("Model resolved via system default", { model: "google/gemini-3-pro" }) + }) + + test("returns system default when availableModels is empty", () => { + // #given + const input: ExtendedModelResolutionInput = { + fallbackChain: [ + { providers: ["anthropic"], model: "claude-opus-4-5" }, + ], + availableModels: new Set(), + systemDefaultModel: "google/gemini-3-pro", + } + + // #when + const result = resolveModelWithFallback(input) + + // #then + expect(result.model).toBe("google/gemini-3-pro") + expect(result.source).toBe("system-default") + }) + + test("returns system default when fallbackChain is not provided", () => { + // #given + const input: ExtendedModelResolutionInput = { + availableModels: new Set(["openai/gpt-5.2"]), + systemDefaultModel: 
"google/gemini-3-pro", + } + + // #when + const result = resolveModelWithFallback(input) + + // #then + expect(result.model).toBe("google/gemini-3-pro") + expect(result.source).toBe("system-default") + }) + }) + + describe("Multi-entry fallbackChain", () => { + test("resolves to claude-opus when OpenAI unavailable but Anthropic available (oracle scenario)", () => { + // #given + const availableModels = new Set(["anthropic/claude-opus-4-5"]) + + // #when + const result = resolveModelWithFallback({ + fallbackChain: [ + { providers: ["openai", "github-copilot", "opencode"], model: "gpt-5.2", variant: "high" }, + { providers: ["anthropic", "github-copilot", "opencode"], model: "claude-opus-4-5", variant: "max" }, + ], + availableModels, + systemDefaultModel: "system/default", + }) + + // #then + expect(result.model).toBe("anthropic/claude-opus-4-5") + expect(result.source).toBe("provider-fallback") + }) + + test("tries all providers in first entry before moving to second entry", () => { + // #given + const availableModels = new Set(["google/gemini-3-pro-preview"]) + + // #when + const result = resolveModelWithFallback({ + fallbackChain: [ + { providers: ["openai", "anthropic"], model: "gpt-5.2" }, + { providers: ["google"], model: "gemini-3-pro-preview" }, + ], + availableModels, + systemDefaultModel: "system/default", + }) + + // #then + expect(result.model).toBe("google/gemini-3-pro-preview") + expect(result.source).toBe("provider-fallback") + }) + + test("returns first matching entry even if later entries have better matches", () => { + // #given + const availableModels = new Set([ + "openai/gpt-5.2", + "anthropic/claude-opus-4-5", + ]) + + // #when + const result = resolveModelWithFallback({ + fallbackChain: [ + { providers: ["openai"], model: "gpt-5.2" }, + { providers: ["anthropic"], model: "claude-opus-4-5" }, + ], + availableModels, + systemDefaultModel: "system/default", + }) + + // #then + expect(result.model).toBe("openai/gpt-5.2") + 
expect(result.source).toBe("provider-fallback") + }) + + test("falls through all entries to system default when none match", () => { + // #given + const availableModels = new Set(["other/model"]) + + // #when + const result = resolveModelWithFallback({ + fallbackChain: [ + { providers: ["openai"], model: "gpt-5.2" }, + { providers: ["anthropic"], model: "claude-opus-4-5" }, + { providers: ["google"], model: "gemini-3-pro" }, + ], + availableModels, + systemDefaultModel: "system/default", + }) + + // #then + expect(result.model).toBe("system/default") + expect(result.source).toBe("system-default") + }) + }) + + describe("Type safety", () => { + test("result has correct ModelResolutionResult shape", () => { + // #given + const input: ExtendedModelResolutionInput = { + userModel: "anthropic/claude-opus-4-5", + availableModels: new Set(), + systemDefaultModel: "google/gemini-3-pro", + } + + // #when + const result: ModelResolutionResult = resolveModelWithFallback(input) + + // #then + expect(typeof result.model).toBe("string") + expect(["override", "provider-fallback", "system-default"]).toContain(result.source) + }) + }) +}) diff --git a/src/shared/model-resolver.ts b/src/shared/model-resolver.ts index 2e67f85d..1b1fd739 100644 --- a/src/shared/model-resolver.ts +++ b/src/shared/model-resolver.ts @@ -1,35 +1,79 @@ -/** - * Input for model resolution. - * All model strings are optional except systemDefault which is the terminal fallback. 
- */
+import { log } from "./logger"
+import { fuzzyMatchModel } from "./model-availability"
+import type { FallbackEntry } from "./model-requirements"
+
 export type ModelResolutionInput = {
-  /** Model from user category config */
-  userModel?: string;
-  /** Model inherited from parent task/session */
-  inheritedModel?: string;
-  /** System default model from OpenCode config - always required */
-  systemDefault: string;
-};
+  userModel?: string
+  inheritedModel?: string
+  systemDefault: string
+}
+
+export type ModelSource =
+  | "override"
+  | "provider-fallback"
+  | "system-default"
+
+export type ModelResolutionResult = {
+  model: string
+  source: ModelSource
+}
+
+export type ExtendedModelResolutionInput = {
+  userModel?: string
+  fallbackChain?: FallbackEntry[]
+  availableModels: Set<string>
+  systemDefaultModel: string
+}
 
-/**
- * Normalizes a model string.
- * Trims whitespace and treats empty/whitespace-only as undefined.
- */
 function normalizeModel(model?: string): string | undefined {
-  const trimmed = model?.trim();
-  return trimmed || undefined;
+  const trimmed = model?.trim()
+  return trimmed || undefined
 }
 
-/**
- * Resolves the effective model using priority chain:
- * userModel → inheritedModel → systemDefault
- *
- * Empty strings and whitespace-only strings are treated as unset.
- */
 export function resolveModel(input: ModelResolutionInput): string {
-  return (
-    normalizeModel(input.userModel) ??
-    normalizeModel(input.inheritedModel) ??
-    input.systemDefault
-  );
+  return (
+    normalizeModel(input.userModel) ??
+    normalizeModel(input.inheritedModel) ??
+    input.systemDefault
+  )
+}
+
+export function resolveModelWithFallback(
+  input: ExtendedModelResolutionInput,
+): ModelResolutionResult {
+  const { userModel, fallbackChain, availableModels, systemDefaultModel } = input
+
+  // Step 1: Override
+  const normalizedUserModel = normalizeModel(userModel)
+  if (normalizedUserModel) {
+    log("Model resolved via override", { model: normalizedUserModel })
+    return { model: normalizedUserModel, source: "override" }
+  }
+
+  // Step 2: Provider fallback chain (with availability check)
+  if (fallbackChain && fallbackChain.length > 0) {
+    for (const entry of fallbackChain) {
+      for (const provider of entry.providers) {
+        const fullModel = `${provider}/${entry.model}`
+        const match = fuzzyMatchModel(fullModel, availableModels, [provider])
+        if (match) {
+          log("Model resolved via fallback chain", { provider, model: entry.model, match })
+          return { model: match, source: "provider-fallback" }
+        }
+      }
+    }
+  }
+
+  // NOTE: Deliberately no "use the first chain entry anyway" fallback here.
+  // When no provider/model pair in the chain matches an available model,
+  // returning an unverified `provider/model` string would bypass the
+  // availability check above and contradict the documented 3-step
+  // resolution (override -> availability-checked fallback chain -> system
+  // default). It would also break the contract pinned by the test
+  // "falls through all entries to system default when none match", which
+  // expects the system default in exactly that situation.
+
+  // Step 3: System default
+  log("Model resolved via system default", { model: systemDefaultModel })
+  return { model: systemDefaultModel, source: "system-default" }
+}