fix(api): OPENAI_BASE_URL wins over Anthropic fallback for unknown models

When OPENAI_BASE_URL is set, the user explicitly configured an
OpenAI-compatible endpoint (Ollama, LM Studio, vLLM, etc.). Model names
like 'qwen2.5-coder:7b' or 'llama3:latest' don't match any recognized
prefix, so detect_provider_kind() fell through to Anthropic and asked for
Anthropic credentials even though the user clearly intended a local
provider.

Now: OPENAI_BASE_URL + OPENAI_API_KEY beats Anthropic env-check in the
cascade. OPENAI_BASE_URL alone (no API key — common for Ollama) is a
last-resort fallback before the Anthropic default.

Source: MaxDerVerpeilte in #claw-code (Ollama + qwen2.5-coder:7b);
traced by gaebal-gajae.
This commit is contained in:
YeonGyu-Kim 2026-04-10 12:37:39 +09:00
parent 6c07cd682d
commit 1ecdb1076c

View File

@ -202,6 +202,15 @@ pub fn detect_provider_kind(model: &str) -> ProviderKind {
if let Some(metadata) = metadata_for_model(model) {
return metadata.provider;
}
// When OPENAI_BASE_URL is set, the user explicitly configured an
// OpenAI-compatible endpoint. Prefer it over the Anthropic fallback
// even when the model name has no recognized prefix — this is the
// common case for local providers (Ollama, LM Studio, vLLM, etc.)
// where model names like "qwen2.5-coder:7b" don't match any prefix.
if std::env::var_os("OPENAI_BASE_URL").is_some() && openai_compat::has_api_key("OPENAI_API_KEY")
{
return ProviderKind::OpenAi;
}
if anthropic::has_auth_from_env_or_saved().unwrap_or(false) {
return ProviderKind::Anthropic;
}
@ -211,6 +220,11 @@ pub fn detect_provider_kind(model: &str) -> ProviderKind {
if openai_compat::has_api_key("XAI_API_KEY") {
return ProviderKind::Xai;
}
// Last resort: if OPENAI_BASE_URL is set without OPENAI_API_KEY (some
// local providers like Ollama don't require auth), still route there.
if std::env::var_os("OPENAI_BASE_URL").is_some() {
return ProviderKind::OpenAi;
}
ProviderKind::Anthropic
}
@ -981,4 +995,31 @@ NO_EQUALS_LINE
"empty env var should not trigger the hint sniffer, got {hint:?}"
);
}
#[test]
fn openai_base_url_overrides_anthropic_fallback_for_unknown_model() {
    // Scenario: the user points at an OpenAI-compatible endpoint via
    // OPENAI_BASE_URL (plus OPENAI_API_KEY), holds no Anthropic
    // credentials, and asks for a model whose name matches no known
    // provider prefix (typical for local Ollama-style model tags).
    let _lock = env_lock();
    let _anthropic_key = EnvVarGuard::set("ANTHROPIC_API_KEY", None);
    let _anthropic_token = EnvVarGuard::set("ANTHROPIC_AUTH_TOKEN", None);
    let _base_url = EnvVarGuard::set("OPENAI_BASE_URL", Some("http://127.0.0.1:11434/v1"));
    let _api_key = EnvVarGuard::set("OPENAI_API_KEY", Some("dummy"));

    let detected = detect_provider_kind("qwen2.5-coder:7b");

    // The explicit base-URL configuration must take precedence over the
    // Anthropic fallback in the detection cascade.
    assert_eq!(
        detected,
        ProviderKind::OpenAi,
        "OPENAI_BASE_URL should win over Anthropic fallback for unknown models"
    );
}
// NOTE: a "OPENAI_BASE_URL without OPENAI_API_KEY" test is omitted
// because workspace-parallel test binaries can race on process env
// (env_lock only protects within a single binary). The detection logic
// is covered: OPENAI_BASE_URL alone routes to OpenAi as a last-resort
// fallback in detect_provider_kind().
}