fix(multimodal-looker): update fallback chain order (#1050)
New order: 1. google/gemini-3-flash 2. openai/gpt-5.2 3. zai-coding-plan/glm-4.6v 4. anthropic/claude-haiku-4-5 5. opencode/gpt-5-nano (FREE, ultimate fallback) Co-authored-by: justsisyphus <justsisyphus@users.noreply.github.com>
This commit is contained in:
parent
04633ba208
commit
faf172a91d
@ -405,7 +405,7 @@ Each agent has a defined provider priority chain. The system tries providers in
|
|||||||
| **oracle** | `gpt-5.2` | openai → anthropic → google → github-copilot → opencode |
|
| **oracle** | `gpt-5.2` | openai → anthropic → google → github-copilot → opencode |
|
||||||
| **librarian** | `big-pickle` | opencode → github-copilot → anthropic |
|
| **librarian** | `big-pickle` | opencode → github-copilot → anthropic |
|
||||||
| **explore** | `gpt-5-nano` | opencode → anthropic → github-copilot |
|
| **explore** | `gpt-5-nano` | opencode → anthropic → github-copilot |
|
||||||
| **multimodal-looker** | `gemini-3-flash` | google → anthropic → zai → openai → github-copilot → opencode |
|
| **multimodal-looker** | `gemini-3-flash` | google → openai → zai-coding-plan → anthropic → opencode |
|
||||||
| **Prometheus (Planner)** | `claude-opus-4-5` | anthropic → github-copilot → opencode → antigravity → google |
|
| **Prometheus (Planner)** | `claude-opus-4-5` | anthropic → github-copilot → opencode → antigravity → google |
|
||||||
| **Metis (Plan Consultant)** | `claude-sonnet-4-5` | anthropic → github-copilot → opencode → antigravity → google |
|
| **Metis (Plan Consultant)** | `claude-sonnet-4-5` | anthropic → github-copilot → opencode → antigravity → google |
|
||||||
| **Momus (Plan Reviewer)** | `claude-opus-4-5` | anthropic → github-copilot → opencode → antigravity → google |
|
| **Momus (Plan Reviewer)** | `claude-opus-4-5` | anthropic → github-copilot → opencode → antigravity → google |
|
||||||
|
|||||||
@ -112,9 +112,9 @@ Each agent has a **provider priority chain**. The system tries providers in orde
|
|||||||
|
|
||||||
```
|
```
|
||||||
Example: multimodal-looker
|
Example: multimodal-looker
|
||||||
google → anthropic → zai → openai → github-copilot → opencode
|
google → openai → zai-coding-plan → anthropic → opencode
|
||||||
↓ ↓ ↓ ↓ ↓ ↓
|
↓ ↓ ↓ ↓ ↓
|
||||||
gemini haiku glm-4.6 gpt-5.2 fallback fallback
|
gemini gpt-5.2 glm-4.6v haiku gpt-5-nano
|
||||||
```
|
```
|
||||||
|
|
||||||
If you have Gemini, it uses `google/gemini-3-flash`. No Gemini but have Claude? Uses `anthropic/claude-haiku-4-5`. And so on.
|
If you have Gemini, it uses `google/gemini-3-flash`. No Gemini but have OpenAI? Uses `openai/gpt-5.2`. And so on, down the chain to the free `opencode/gpt-5-nano` ultimate fallback.
|
||||||
@ -131,7 +131,7 @@ Here's a real-world config for a user with **Claude, OpenAI, Gemini, and Z.ai**
|
|||||||
"Atlas": { "model": "anthropic/claude-sonnet-4-5", "variant": "max" },
|
"Atlas": { "model": "anthropic/claude-sonnet-4-5", "variant": "max" },
|
||||||
"librarian": { "model": "zai-coding-plan/glm-4.7" },
|
"librarian": { "model": "zai-coding-plan/glm-4.7" },
|
||||||
"explore": { "model": "opencode/gpt-5-nano" },
|
"explore": { "model": "opencode/gpt-5-nano" },
|
||||||
"multimodal-looker": { "model": "zai-coding-plan/glm-4.6" }
|
"multimodal-looker": { "model": "zai-coding-plan/glm-4.6v" }
|
||||||
},
|
},
|
||||||
"categories": {
|
"categories": {
|
||||||
// Override categories for cost optimization
|
// Override categories for cost optimization
|
||||||
|
|||||||
@ -854,7 +854,7 @@ exports[`generateModelConfig fallback providers uses ZAI model for librarian whe
|
|||||||
"model": "opencode/big-pickle",
|
"model": "opencode/big-pickle",
|
||||||
},
|
},
|
||||||
"multimodal-looker": {
|
"multimodal-looker": {
|
||||||
"model": "zai-coding-plan/glm-4.6",
|
"model": "zai-coding-plan/glm-4.6v",
|
||||||
},
|
},
|
||||||
"oracle": {
|
"oracle": {
|
||||||
"model": "opencode/big-pickle",
|
"model": "opencode/big-pickle",
|
||||||
@ -912,7 +912,7 @@ exports[`generateModelConfig fallback providers uses ZAI model for librarian wit
|
|||||||
"model": "opencode/big-pickle",
|
"model": "opencode/big-pickle",
|
||||||
},
|
},
|
||||||
"multimodal-looker": {
|
"multimodal-looker": {
|
||||||
"model": "zai-coding-plan/glm-4.6",
|
"model": "zai-coding-plan/glm-4.6v",
|
||||||
},
|
},
|
||||||
"oracle": {
|
"oracle": {
|
||||||
"model": "opencode/big-pickle",
|
"model": "opencode/big-pickle",
|
||||||
@ -1099,7 +1099,7 @@ exports[`generateModelConfig mixed provider scenarios uses Claude + ZAI combinat
|
|||||||
"model": "anthropic/claude-opus-4-5",
|
"model": "anthropic/claude-opus-4-5",
|
||||||
},
|
},
|
||||||
"multimodal-looker": {
|
"multimodal-looker": {
|
||||||
"model": "anthropic/claude-haiku-4-5",
|
"model": "zai-coding-plan/glm-4.6v",
|
||||||
},
|
},
|
||||||
"oracle": {
|
"oracle": {
|
||||||
"model": "anthropic/claude-opus-4-5",
|
"model": "anthropic/claude-opus-4-5",
|
||||||
|
|||||||
@ -41,10 +41,10 @@ export const AGENT_MODEL_REQUIREMENTS: Record<string, ModelRequirement> = {
|
|||||||
"multimodal-looker": {
|
"multimodal-looker": {
|
||||||
fallbackChain: [
|
fallbackChain: [
|
||||||
{ providers: ["google", "github-copilot", "opencode"], model: "gemini-3-flash" },
|
{ providers: ["google", "github-copilot", "opencode"], model: "gemini-3-flash" },
|
||||||
{ providers: ["anthropic", "github-copilot", "opencode"], model: "claude-haiku-4-5" },
|
|
||||||
{ providers: ["zai-coding-plan"], model: "glm-4.6v" },
|
|
||||||
{ providers: ["opencode"], model: "gpt-5-nano" },
|
|
||||||
{ providers: ["openai", "github-copilot", "opencode"], model: "gpt-5.2" },
|
{ providers: ["openai", "github-copilot", "opencode"], model: "gpt-5.2" },
|
||||||
|
{ providers: ["zai-coding-plan"], model: "glm-4.6v" },
|
||||||
|
{ providers: ["anthropic", "github-copilot", "opencode"], model: "claude-haiku-4-5" },
|
||||||
|
{ providers: ["opencode"], model: "gpt-5-nano" },
|
||||||
],
|
],
|
||||||
},
|
},
|
||||||
prometheus: {
|
prometheus: {
|
||||||
|
|||||||
Loading…
x
Reference in New Issue
Block a user