feat(shared): add model availability with fuzzy matching and fetch
Implement fuzzyMatchModel() for case-insensitive substring matching with provider filtering. Add fetchAvailableModels() to get available models from the OpenCode client, with caching. Ultraworked with [Sisyphus](https://github.com/code-yeongyu/oh-my-opencode)

Co-authored-by: Sisyphus <clio-agent@sisyphuslabs.ai>
This commit is contained in:
parent
f4a0d5ec40
commit
bc62c23a85
251
src/shared/model-availability.test.ts
Normal file
251
src/shared/model-availability.test.ts
Normal file
@@ -0,0 +1,251 @@
|
||||
import { describe, it, expect, beforeEach } from "bun:test"
|
||||
import { fetchAvailableModels, fuzzyMatchModel, __resetModelCache } from "./model-availability"
|
||||
|
||||
describe("fetchAvailableModels", () => {
|
||||
let mockClient: any
|
||||
|
||||
beforeEach(() => {
|
||||
__resetModelCache()
|
||||
})
|
||||
|
||||
it("#given API returns list of models #when fetchAvailableModels called #then returns Set of model IDs", async () => {
|
||||
const mockModels = [
|
||||
{ id: "openai/gpt-5.2", name: "GPT-5.2" },
|
||||
{ id: "anthropic/claude-opus-4-5", name: "Claude Opus 4.5" },
|
||||
{ id: "google/gemini-3-pro", name: "Gemini 3 Pro" },
|
||||
]
|
||||
mockClient = {
|
||||
model: {
|
||||
list: async () => mockModels,
|
||||
},
|
||||
}
|
||||
|
||||
const result = await fetchAvailableModels(mockClient)
|
||||
|
||||
expect(result).toBeInstanceOf(Set)
|
||||
expect(result.size).toBe(3)
|
||||
expect(result.has("openai/gpt-5.2")).toBe(true)
|
||||
expect(result.has("anthropic/claude-opus-4-5")).toBe(true)
|
||||
expect(result.has("google/gemini-3-pro")).toBe(true)
|
||||
})
|
||||
|
||||
it("#given API fails #when fetchAvailableModels called #then returns empty Set without throwing", async () => {
|
||||
mockClient = {
|
||||
model: {
|
||||
list: async () => {
|
||||
throw new Error("API connection failed")
|
||||
},
|
||||
},
|
||||
}
|
||||
|
||||
const result = await fetchAvailableModels(mockClient)
|
||||
|
||||
expect(result).toBeInstanceOf(Set)
|
||||
expect(result.size).toBe(0)
|
||||
})
|
||||
|
||||
it("#given API called twice #when second call made #then uses cached result without re-fetching", async () => {
|
||||
let callCount = 0
|
||||
const mockModels = [
|
||||
{ id: "openai/gpt-5.2", name: "GPT-5.2" },
|
||||
{ id: "anthropic/claude-opus-4-5", name: "Claude Opus 4.5" },
|
||||
]
|
||||
mockClient = {
|
||||
model: {
|
||||
list: async () => {
|
||||
callCount++
|
||||
return mockModels
|
||||
},
|
||||
},
|
||||
}
|
||||
|
||||
const result1 = await fetchAvailableModels(mockClient)
|
||||
const result2 = await fetchAvailableModels(mockClient)
|
||||
|
||||
expect(callCount).toBe(1)
|
||||
expect(result1).toEqual(result2)
|
||||
expect(result1.has("openai/gpt-5.2")).toBe(true)
|
||||
})
|
||||
|
||||
it("#given empty model list from API #when fetchAvailableModels called #then returns empty Set", async () => {
|
||||
mockClient = {
|
||||
model: {
|
||||
list: async () => [],
|
||||
},
|
||||
}
|
||||
|
||||
const result = await fetchAvailableModels(mockClient)
|
||||
|
||||
expect(result).toBeInstanceOf(Set)
|
||||
expect(result.size).toBe(0)
|
||||
})
|
||||
|
||||
it("#given API returns models with various formats #when fetchAvailableModels called #then extracts all IDs correctly", async () => {
|
||||
const mockModels = [
|
||||
{ id: "openai/gpt-5.2-codex", name: "GPT-5.2 Codex" },
|
||||
{ id: "anthropic/claude-sonnet-4-5", name: "Claude Sonnet 4.5" },
|
||||
{ id: "google/gemini-3-flash", name: "Gemini 3 Flash" },
|
||||
{ id: "opencode/grok-code", name: "Grok Code" },
|
||||
]
|
||||
mockClient = {
|
||||
model: {
|
||||
list: async () => mockModels,
|
||||
},
|
||||
}
|
||||
|
||||
const result = await fetchAvailableModels(mockClient)
|
||||
|
||||
expect(result.size).toBe(4)
|
||||
expect(result.has("openai/gpt-5.2-codex")).toBe(true)
|
||||
expect(result.has("anthropic/claude-sonnet-4-5")).toBe(true)
|
||||
expect(result.has("google/gemini-3-flash")).toBe(true)
|
||||
expect(result.has("opencode/grok-code")).toBe(true)
|
||||
})
|
||||
})
|
||||
|
||||
describe("fuzzyMatchModel", () => {
|
||||
// #given available models from multiple providers
|
||||
// #when searching for a substring match
|
||||
// #then return the matching model
|
||||
it("should match substring in model name", () => {
|
||||
const available = new Set([
|
||||
"openai/gpt-5.2",
|
||||
"openai/gpt-5.2-codex",
|
||||
"anthropic/claude-opus-4-5",
|
||||
])
|
||||
const result = fuzzyMatchModel("gpt-5.2", available)
|
||||
expect(result).toBe("openai/gpt-5.2")
|
||||
})
|
||||
|
||||
// #given available models with partial matches
|
||||
// #when searching for a substring
|
||||
// #then return exact match if it exists
|
||||
it("should prefer exact match over substring match", () => {
|
||||
const available = new Set([
|
||||
"openai/gpt-5.2",
|
||||
"openai/gpt-5.2-codex",
|
||||
"openai/gpt-5.2-ultra",
|
||||
])
|
||||
const result = fuzzyMatchModel("gpt-5.2", available)
|
||||
expect(result).toBe("openai/gpt-5.2")
|
||||
})
|
||||
|
||||
// #given available models with multiple substring matches
|
||||
// #when searching for a substring
|
||||
// #then return the shorter model name (more specific)
|
||||
it("should prefer shorter model name when multiple matches exist", () => {
|
||||
const available = new Set([
|
||||
"openai/gpt-5.2-ultra",
|
||||
"openai/gpt-5.2-ultra-mega",
|
||||
])
|
||||
const result = fuzzyMatchModel("gpt-5.2", available)
|
||||
expect(result).toBe("openai/gpt-5.2-ultra")
|
||||
})
|
||||
|
||||
// #given available models with claude variants
|
||||
// #when searching for claude-opus
|
||||
// #then return matching claude-opus model
|
||||
it("should match claude-opus to claude-opus-4-5", () => {
|
||||
const available = new Set([
|
||||
"anthropic/claude-opus-4-5",
|
||||
"anthropic/claude-sonnet-4-5",
|
||||
])
|
||||
const result = fuzzyMatchModel("claude-opus", available)
|
||||
expect(result).toBe("anthropic/claude-opus-4-5")
|
||||
})
|
||||
|
||||
// #given available models from multiple providers
|
||||
// #when providers filter is specified
|
||||
// #then only search models from specified providers
|
||||
it("should filter by provider when providers array is given", () => {
|
||||
const available = new Set([
|
||||
"openai/gpt-5.2",
|
||||
"anthropic/claude-opus-4-5",
|
||||
"google/gemini-3",
|
||||
])
|
||||
const result = fuzzyMatchModel("gpt", available, ["openai"])
|
||||
expect(result).toBe("openai/gpt-5.2")
|
||||
})
|
||||
|
||||
// #given available models from multiple providers
|
||||
// #when providers filter excludes matching models
|
||||
// #then return null
|
||||
it("should return null when provider filter excludes all matches", () => {
|
||||
const available = new Set([
|
||||
"openai/gpt-5.2",
|
||||
"anthropic/claude-opus-4-5",
|
||||
])
|
||||
const result = fuzzyMatchModel("claude", available, ["openai"])
|
||||
expect(result).toBeNull()
|
||||
})
|
||||
|
||||
// #given available models
|
||||
// #when no substring match exists
|
||||
// #then return null
|
||||
it("should return null when no match found", () => {
|
||||
const available = new Set([
|
||||
"openai/gpt-5.2",
|
||||
"anthropic/claude-opus-4-5",
|
||||
])
|
||||
const result = fuzzyMatchModel("gemini", available)
|
||||
expect(result).toBeNull()
|
||||
})
|
||||
|
||||
// #given available models with different cases
|
||||
// #when searching with different case
|
||||
// #then match case-insensitively
|
||||
it("should match case-insensitively", () => {
|
||||
const available = new Set([
|
||||
"openai/gpt-5.2",
|
||||
"anthropic/claude-opus-4-5",
|
||||
])
|
||||
const result = fuzzyMatchModel("GPT-5.2", available)
|
||||
expect(result).toBe("openai/gpt-5.2")
|
||||
})
|
||||
|
||||
// #given available models with exact match and longer variants
|
||||
// #when searching for exact match
|
||||
// #then return exact match first
|
||||
it("should prioritize exact match over longer variants", () => {
|
||||
const available = new Set([
|
||||
"anthropic/claude-opus-4-5",
|
||||
"anthropic/claude-opus-4-5-extended",
|
||||
])
|
||||
const result = fuzzyMatchModel("claude-opus-4-5", available)
|
||||
expect(result).toBe("anthropic/claude-opus-4-5")
|
||||
})
|
||||
|
||||
// #given available models with multiple providers
|
||||
// #when multiple providers are specified
|
||||
// #then search all specified providers
|
||||
it("should search all specified providers", () => {
|
||||
const available = new Set([
|
||||
"openai/gpt-5.2",
|
||||
"anthropic/claude-opus-4-5",
|
||||
"google/gemini-3",
|
||||
])
|
||||
const result = fuzzyMatchModel("gpt", available, ["openai", "google"])
|
||||
expect(result).toBe("openai/gpt-5.2")
|
||||
})
|
||||
|
||||
// #given available models with provider prefix
|
||||
// #when searching with provider filter
|
||||
// #then only match models with correct provider prefix
|
||||
it("should only match models with correct provider prefix", () => {
|
||||
const available = new Set([
|
||||
"openai/gpt-5.2",
|
||||
"anthropic/gpt-something",
|
||||
])
|
||||
const result = fuzzyMatchModel("gpt", available, ["openai"])
|
||||
expect(result).toBe("openai/gpt-5.2")
|
||||
})
|
||||
|
||||
// #given empty available set
|
||||
// #when searching
|
||||
// #then return null
|
||||
it("should return null for empty available set", () => {
|
||||
const available = new Set<string>()
|
||||
const result = fuzzyMatchModel("gpt", available)
|
||||
expect(result).toBeNull()
|
||||
})
|
||||
})
|
||||
118
src/shared/model-availability.ts
Normal file
118
src/shared/model-availability.ts
Normal file
@@ -0,0 +1,118 @@
|
||||
/**
|
||||
* Fuzzy matching utility for model names
|
||||
* Supports substring matching with provider filtering and priority-based selection
|
||||
*/
|
||||
|
||||
import { log } from "./logger"
|
||||
|
||||
/**
|
||||
* Fuzzy match a target model name against available models
|
||||
*
|
||||
* @param target - The model name or substring to search for (e.g., "gpt-5.2", "claude-opus")
|
||||
* @param available - Set of available model names in format "provider/model-name"
|
||||
* @param providers - Optional array of provider names to filter by (e.g., ["openai", "anthropic"])
|
||||
* @returns The matched model name or null if no match found
|
||||
*
|
||||
* Matching priority:
|
||||
* 1. Exact match (if exists)
|
||||
* 2. Shorter model name (more specific)
|
||||
*
|
||||
* Matching is case-insensitive substring match.
|
||||
* If providers array is given, only models starting with "provider/" are considered.
|
||||
*
|
||||
* @example
|
||||
* const available = new Set(["openai/gpt-5.2", "openai/gpt-5.2-codex", "anthropic/claude-opus-4-5"])
|
||||
* fuzzyMatchModel("gpt-5.2", available) // → "openai/gpt-5.2"
|
||||
* fuzzyMatchModel("claude", available, ["openai"]) // → null (provider filter excludes anthropic)
|
||||
*/
|
||||
export function fuzzyMatchModel(
|
||||
target: string,
|
||||
available: Set<string>,
|
||||
providers?: string[],
|
||||
): string | null {
|
||||
log("[fuzzyMatchModel] called", { target, availableCount: available.size, providers })
|
||||
|
||||
if (available.size === 0) {
|
||||
log("[fuzzyMatchModel] empty available set")
|
||||
return null
|
||||
}
|
||||
|
||||
const targetLower = target.toLowerCase()
|
||||
|
||||
// Filter by providers if specified
|
||||
let candidates = Array.from(available)
|
||||
if (providers && providers.length > 0) {
|
||||
const providerSet = new Set(providers)
|
||||
candidates = candidates.filter((model) => {
|
||||
const [provider] = model.split("/")
|
||||
return providerSet.has(provider)
|
||||
})
|
||||
log("[fuzzyMatchModel] filtered by providers", { candidateCount: candidates.length, candidates: candidates.slice(0, 10) })
|
||||
}
|
||||
|
||||
if (candidates.length === 0) {
|
||||
log("[fuzzyMatchModel] no candidates after filter")
|
||||
return null
|
||||
}
|
||||
|
||||
// Find all matches (case-insensitive substring match)
|
||||
const matches = candidates.filter((model) =>
|
||||
model.toLowerCase().includes(targetLower),
|
||||
)
|
||||
|
||||
log("[fuzzyMatchModel] substring matches", { targetLower, matchCount: matches.length, matches })
|
||||
|
||||
if (matches.length === 0) {
|
||||
return null
|
||||
}
|
||||
|
||||
// Priority 1: Exact match
|
||||
const exactMatch = matches.find((model) => model.toLowerCase() === targetLower)
|
||||
if (exactMatch) {
|
||||
log("[fuzzyMatchModel] exact match found", { exactMatch })
|
||||
return exactMatch
|
||||
}
|
||||
|
||||
// Priority 2: Shorter model name (more specific)
|
||||
const result = matches.reduce((shortest, current) =>
|
||||
current.length < shortest.length ? current : shortest,
|
||||
)
|
||||
log("[fuzzyMatchModel] shortest match", { result })
|
||||
return result
|
||||
}
|
||||
|
||||
let cachedModels: Set<string> | null = null
|
||||
|
||||
export async function fetchAvailableModels(client: any): Promise<Set<string>> {
|
||||
if (cachedModels !== null) {
|
||||
log("[fetchAvailableModels] returning cached models", { count: cachedModels.size, models: Array.from(cachedModels).slice(0, 20) })
|
||||
return cachedModels
|
||||
}
|
||||
|
||||
try {
|
||||
const models = await client.model.list()
|
||||
const modelSet = new Set<string>()
|
||||
|
||||
log("[fetchAvailableModels] raw response", { isArray: Array.isArray(models), length: Array.isArray(models) ? models.length : 0, sample: Array.isArray(models) ? models.slice(0, 5) : models })
|
||||
|
||||
if (Array.isArray(models)) {
|
||||
for (const model of models) {
|
||||
if (model.id && typeof model.id === "string") {
|
||||
modelSet.add(model.id)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
log("[fetchAvailableModels] parsed models", { count: modelSet.size, models: Array.from(modelSet) })
|
||||
|
||||
cachedModels = modelSet
|
||||
return modelSet
|
||||
} catch (err) {
|
||||
log("[fetchAvailableModels] error", { error: String(err) })
|
||||
return new Set<string>()
|
||||
}
|
||||
}
|
||||
|
||||
/**
 * Test-only hook: clear the module-level model cache so the next
 * fetchAvailableModels() call starts cold and hits the API again.
 */
export function __resetModelCache(): void {
  cachedModels = null
}
|
||||
Loading…
x
Reference in New Issue
Block a user