feat(agents): wire Gemini prompt routing into Sisyphus-Junior, Atlas, Prometheus

Add 'gemini' to prompt source types and route Gemini models to new
Gemini-optimized prompts via isGeminiModel detection. Update barrel
exports for all 3 agent modules. All existing tests pass.
This commit is contained in:
YeonGyu-Kim 2026-02-22 15:11:35 +09:00
parent bf33e6f651
commit 49e885d81d
6 changed files with 32 additions and 8 deletions

View File

@ -6,12 +6,13 @@
* *
* Routing: * Routing:
* 1. GPT models (openai/*, github-copilot/gpt-*) → gpt.ts (GPT-5.2 optimized) * 1. GPT models (openai/*, github-copilot/gpt-*) → gpt.ts (GPT-5.2 optimized)
* 2. Default (Claude, etc.) → default.ts (Claude-optimized) * 2. Gemini models (google/*, google-vertex/*) → gemini.ts (Gemini-optimized)
* 3. Default (Claude, etc.) → default.ts (Claude-optimized)
*/ */
import type { AgentConfig } from "@opencode-ai/sdk" import type { AgentConfig } from "@opencode-ai/sdk"
import type { AgentMode, AgentPromptMetadata } from "../types" import type { AgentMode, AgentPromptMetadata } from "../types"
import { isGptModel } from "../types" import { isGptModel, isGeminiModel } from "../types"
import type { AvailableAgent, AvailableSkill, AvailableCategory } from "../dynamic-agent-prompt-builder" import type { AvailableAgent, AvailableSkill, AvailableCategory } from "../dynamic-agent-prompt-builder"
import { buildCategorySkillsDelegationGuide } from "../dynamic-agent-prompt-builder" import { buildCategorySkillsDelegationGuide } from "../dynamic-agent-prompt-builder"
import type { CategoryConfig } from "../../config/schema" import type { CategoryConfig } from "../../config/schema"
@ -20,6 +21,7 @@ import { createAgentToolRestrictions } from "../../shared/permission-compat"
import { getDefaultAtlasPrompt } from "./default" import { getDefaultAtlasPrompt } from "./default"
import { getGptAtlasPrompt } from "./gpt" import { getGptAtlasPrompt } from "./gpt"
import { getGeminiAtlasPrompt } from "./gemini"
import { import {
getCategoryDescription, getCategoryDescription,
buildAgentSelectionSection, buildAgentSelectionSection,
@ -30,7 +32,7 @@ import {
const MODE: AgentMode = "primary" const MODE: AgentMode = "primary"
export type AtlasPromptSource = "default" | "gpt" export type AtlasPromptSource = "default" | "gpt" | "gemini"
/** /**
* Determines which Atlas prompt to use based on model. * Determines which Atlas prompt to use based on model.
@ -39,6 +41,9 @@ export function getAtlasPromptSource(model?: string): AtlasPromptSource {
if (model && isGptModel(model)) { if (model && isGptModel(model)) {
return "gpt" return "gpt"
} }
if (model && isGeminiModel(model)) {
return "gemini"
}
return "default" return "default"
} }
@ -58,6 +63,8 @@ export function getAtlasPrompt(model?: string): string {
switch (source) { switch (source) {
case "gpt": case "gpt":
return getGptAtlasPrompt() return getGptAtlasPrompt()
case "gemini":
return getGeminiAtlasPrompt()
case "default": case "default":
default: default:
return getDefaultAtlasPrompt() return getDefaultAtlasPrompt()

View File

@ -1,5 +1,6 @@
export { ATLAS_SYSTEM_PROMPT, getDefaultAtlasPrompt } from "./default" export { ATLAS_SYSTEM_PROMPT, getDefaultAtlasPrompt } from "./default"
export { ATLAS_GPT_SYSTEM_PROMPT, getGptAtlasPrompt } from "./gpt" export { ATLAS_GPT_SYSTEM_PROMPT, getGptAtlasPrompt } from "./gpt"
export { ATLAS_GEMINI_SYSTEM_PROMPT, getGeminiAtlasPrompt } from "./gemini"
export { export {
getCategoryDescription, getCategoryDescription,
buildAgentSelectionSection, buildAgentSelectionSection,

View File

@ -6,6 +6,7 @@ export {
} from "./system-prompt" } from "./system-prompt"
export type { PrometheusPromptSource } from "./system-prompt" export type { PrometheusPromptSource } from "./system-prompt"
export { PROMETHEUS_GPT_SYSTEM_PROMPT, getGptPrometheusPrompt } from "./gpt" export { PROMETHEUS_GPT_SYSTEM_PROMPT, getGptPrometheusPrompt } from "./gpt"
export { PROMETHEUS_GEMINI_SYSTEM_PROMPT, getGeminiPrometheusPrompt } from "./gemini"
// Re-export individual sections for granular access // Re-export individual sections for granular access
export { PROMETHEUS_IDENTITY_CONSTRAINTS } from "./identity-constraints" export { PROMETHEUS_IDENTITY_CONSTRAINTS } from "./identity-constraints"

View File

@ -5,7 +5,8 @@ import { PROMETHEUS_HIGH_ACCURACY_MODE } from "./high-accuracy-mode"
import { PROMETHEUS_PLAN_TEMPLATE } from "./plan-template" import { PROMETHEUS_PLAN_TEMPLATE } from "./plan-template"
import { PROMETHEUS_BEHAVIORAL_SUMMARY } from "./behavioral-summary" import { PROMETHEUS_BEHAVIORAL_SUMMARY } from "./behavioral-summary"
import { getGptPrometheusPrompt } from "./gpt" import { getGptPrometheusPrompt } from "./gpt"
import { isGptModel } from "../types" import { getGeminiPrometheusPrompt } from "./gemini"
import { isGptModel, isGeminiModel } from "../types"
/** /**
* Combined Prometheus system prompt (Claude-optimized, default). * Combined Prometheus system prompt (Claude-optimized, default).
@ -30,7 +31,7 @@ export const PROMETHEUS_PERMISSION = {
question: "allow" as const, question: "allow" as const,
} }
export type PrometheusPromptSource = "default" | "gpt" export type PrometheusPromptSource = "default" | "gpt" | "gemini"
/** /**
* Determines which Prometheus prompt to use based on model. * Determines which Prometheus prompt to use based on model.
@ -39,12 +40,16 @@ export function getPrometheusPromptSource(model?: string): PrometheusPromptSource {
if (model && isGptModel(model)) { if (model && isGptModel(model)) {
return "gpt" return "gpt"
} }
if (model && isGeminiModel(model)) {
return "gemini"
}
return "default" return "default"
} }
/** /**
* Gets the appropriate Prometheus prompt based on model. * Gets the appropriate Prometheus prompt based on model.
* GPT models → GPT-5.2 optimized prompt (XML-tagged, principle-driven) * GPT models → GPT-5.2 optimized prompt (XML-tagged, principle-driven)
* Gemini models → Gemini-optimized prompt (aggressive tool-call enforcement, thinking checkpoints)
* Default (Claude, etc.) → Claude-optimized prompt (modular sections) * Default (Claude, etc.) → Claude-optimized prompt (modular sections)
*/ */
export function getPrometheusPrompt(model?: string): string { export function getPrometheusPrompt(model?: string): string {
@ -53,6 +58,8 @@ export function getPrometheusPrompt(model?: string): string {
switch (source) { switch (source) {
case "gpt": case "gpt":
return getGptPrometheusPrompt() return getGptPrometheusPrompt()
case "gemini":
return getGeminiPrometheusPrompt()
case "default": case "default":
default: default:
return PROMETHEUS_SYSTEM_PROMPT return PROMETHEUS_SYSTEM_PROMPT

View File

@ -6,12 +6,13 @@
* *
* Routing: * Routing:
* 1. GPT models (openai/*, github-copilot/gpt-*) -> gpt.ts (GPT-5.2 optimized) * 1. GPT models (openai/*, github-copilot/gpt-*) -> gpt.ts (GPT-5.2 optimized)
* 2. Default (Claude, etc.) -> default.ts (Claude-optimized) * 2. Gemini models (google/*, google-vertex/*) -> gemini.ts (Gemini-optimized)
* 3. Default (Claude, etc.) -> default.ts (Claude-optimized)
*/ */
import type { AgentConfig } from "@opencode-ai/sdk" import type { AgentConfig } from "@opencode-ai/sdk"
import type { AgentMode } from "../types" import type { AgentMode } from "../types"
import { isGptModel } from "../types" import { isGptModel, isGeminiModel } from "../types"
import type { AgentOverrideConfig } from "../../config/schema" import type { AgentOverrideConfig } from "../../config/schema"
import { import {
createAgentToolRestrictions, createAgentToolRestrictions,
@ -20,6 +21,7 @@ import {
import { buildDefaultSisyphusJuniorPrompt } from "./default" import { buildDefaultSisyphusJuniorPrompt } from "./default"
import { buildGptSisyphusJuniorPrompt } from "./gpt" import { buildGptSisyphusJuniorPrompt } from "./gpt"
import { buildGeminiSisyphusJuniorPrompt } from "./gemini"
const MODE: AgentMode = "subagent" const MODE: AgentMode = "subagent"
@ -32,7 +34,7 @@ export const SISYPHUS_JUNIOR_DEFAULTS = {
temperature: 0.1, temperature: 0.1,
} as const } as const
export type SisyphusJuniorPromptSource = "default" | "gpt" export type SisyphusJuniorPromptSource = "default" | "gpt" | "gemini"
/** /**
* Determines which Sisyphus-Junior prompt to use based on model. * Determines which Sisyphus-Junior prompt to use based on model.
@ -41,6 +43,9 @@ export function getSisyphusJuniorPromptSource(model?: string): SisyphusJuniorPro
if (model && isGptModel(model)) { if (model && isGptModel(model)) {
return "gpt" return "gpt"
} }
if (model && isGeminiModel(model)) {
return "gemini"
}
return "default" return "default"
} }
@ -57,6 +62,8 @@ export function buildSisyphusJuniorPrompt(
switch (source) { switch (source) {
case "gpt": case "gpt":
return buildGptSisyphusJuniorPrompt(useTaskSystem, promptAppend) return buildGptSisyphusJuniorPrompt(useTaskSystem, promptAppend)
case "gemini":
return buildGeminiSisyphusJuniorPrompt(useTaskSystem, promptAppend)
case "default": case "default":
default: default:
return buildDefaultSisyphusJuniorPrompt(useTaskSystem, promptAppend) return buildDefaultSisyphusJuniorPrompt(useTaskSystem, promptAppend)

View File

@ -1,5 +1,6 @@
export { buildDefaultSisyphusJuniorPrompt } from "./default" export { buildDefaultSisyphusJuniorPrompt } from "./default"
export { buildGptSisyphusJuniorPrompt } from "./gpt" export { buildGptSisyphusJuniorPrompt } from "./gpt"
export { buildGeminiSisyphusJuniorPrompt } from "./gemini"
export { export {
SISYPHUS_JUNIOR_DEFAULTS, SISYPHUS_JUNIOR_DEFAULTS,