fix(model-fetch): support /models for Anthropic-compat subpath providers

Providers like DeepSeek, Kimi, Zhipu GLM and MiniMax expose the
Anthropic-compatible API on a subpath (e.g. /anthropic) while the
OpenAI-style /models endpoint lives at the API root. The previous
heuristic blindly appended /v1/models to the Base URL, so every such
provider returned 404 and the UI mislabeled it as "provider does not
support fetching models".

Backend now generates a candidate list and tries them in order:
preset override -> baseURL /v1/models -> stripped-subpath /v1/models ->
stripped-subpath /models. Non-404/405 responses (auth, network) stop
immediately so we never retry against hostile status codes. Known
compat suffixes are kept in a length-descending constant so the
longest match wins; response bodies are truncated to 512 chars to
avoid HTML 404 pages bloating the error string.

Preset type gains an optional modelsUrl (DeepSeek points at
https://api.deepseek.com/models). Frontend threads the override
through fetchModelsForConfig when the current Base URL still matches
the preset default. A new fetchModelsEndpointNotFound i18n key
replaces the misleading "not supported" toast for exhausted-candidate
and 404/405 cases (zh/en/ja).
This commit is contained in:
Jason
2026-04-24 11:49:49 +08:00
parent fcd83ee30d
commit 67dbfc0a8c
8 changed files with 342 additions and 69 deletions

View File

@@ -6,13 +6,20 @@ use crate::services::model_fetch::{self, FetchedModel};
/// 获取供应商的可用模型列表
///
/// 使用 OpenAI 兼容的 GET /v1/models 端点。
/// 主要面向第三方聚合站(硅基流动、OpenRouter 等)
/// 使用 OpenAI 兼容的 GET /v1/models 端点。优先使用 `models_url` 精确覆写;
/// 否则对 baseURL 生成候选列表(含「剥离 Anthropic 兼容子路径」兜底),按序尝试
#[tauri::command(rename_all = "camelCase")]
pub async fn fetch_models_for_config(
base_url: String,
api_key: String,
is_full_url: Option<bool>,
models_url: Option<String>,
) -> Result<Vec<FetchedModel>, String> {
model_fetch::fetch_models(&base_url, &api_key, is_full_url.unwrap_or(false)).await
model_fetch::fetch_models(
&base_url,
&api_key,
is_full_url.unwrap_or(false),
models_url.as_deref(),
)
.await
}

View File

@@ -1,8 +1,10 @@
//! 模型列表获取服务
//!
//! 通过 OpenAI 兼容的 GET /v1/models 端点获取供应商可用模型列表。
//! 主要面向第三方聚合站(硅基流动、OpenRouter 等)
//! 主要面向第三方聚合站(硅基流动、OpenRouter 等),以及把 Anthropic
//! 协议挂在兼容子路径上的官方供应商DeepSeek、Kimi、智谱 GLM 等)。
use reqwest::StatusCode;
use serde::{Deserialize, Serialize};
use std::time::Duration;
@@ -28,85 +30,184 @@ struct ModelEntry {
/// Timeout for the models-list HTTP request, in seconds.
const FETCH_TIMEOUT_SECS: u64 = 15;
/// Maximum number of characters kept from a 404/405 response body: prevents
/// multi-KB HTML 404 pages from bloating the error string.
const ERROR_BODY_MAX_CHARS: usize = 512;
/// Known "Anthropic-protocol compatibility subpath" suffixes, sorted by
/// length in DESCENDING order so the longest suffix is matched first
/// (callers iterate in order and take the first `ends_with` hit).
/// When the baseURL ends with one of these, the candidate list also gets a
/// "strip the suffix, then append /v1/models / /models" variant.
const KNOWN_COMPAT_SUFFIXES: &[&str] = &[
    "/api/claudecode", // 15
    "/apps/anthropic", // 15 — must precede the shorter "/api/anthropic"
    "/api/anthropic",  // 14
    "/api/coding",     // 11
    "/claudecode",     // 11
    "/anthropic",      // 10
    "/step_plan",      // 10
    "/coding",         // 7
    "/claude",         // 7
];
/// 获取供应商的可用模型列表
///
/// 使用 OpenAI 兼容的 GET /v1/models 端点。
/// 使用 OpenAI 兼容的 GET /v1/models 端点,按候选列表顺序尝试
pub async fn fetch_models(
base_url: &str,
api_key: &str,
is_full_url: bool,
models_url_override: Option<&str>,
) -> Result<Vec<FetchedModel>, String> {
if api_key.is_empty() {
return Err("API Key is required to fetch models".to_string());
}
let models_url = build_models_url(base_url, is_full_url)?;
let candidates = build_models_url_candidates(base_url, is_full_url, models_url_override)?;
let client = crate::proxy::http_client::get();
let mut last_err: Option<String> = None;
let response = client
.get(&models_url)
.header("Authorization", format!("Bearer {api_key}"))
.timeout(Duration::from_secs(FETCH_TIMEOUT_SECS))
.send()
.await
.map_err(|e| format!("Request failed: {e}"))?;
for url in &candidates {
log::debug!("[ModelFetch] Trying endpoint: {url}");
let response = match client
.get(url)
.header("Authorization", format!("Bearer {api_key}"))
.timeout(Duration::from_secs(FETCH_TIMEOUT_SECS))
.send()
.await
{
Ok(r) => r,
Err(e) => {
return Err(format!("Request failed: {e}"));
}
};
let status = response.status();
if !status.is_success() {
let body = response.text().await.unwrap_or_default();
let status = response.status();
if status.is_success() {
let resp: ModelsResponse = response
.json()
.await
.map_err(|e| format!("Failed to parse response: {e}"))?;
let mut models: Vec<FetchedModel> = resp
.data
.unwrap_or_default()
.into_iter()
.map(|m| FetchedModel {
id: m.id,
owned_by: m.owned_by,
})
.collect();
models.sort_by(|a, b| a.id.cmp(&b.id));
return Ok(models);
}
if status == StatusCode::NOT_FOUND || status == StatusCode::METHOD_NOT_ALLOWED {
let body = truncate_body(response.text().await.unwrap_or_default());
last_err = Some(format!("HTTP {status}: {body}"));
continue;
}
let body = truncate_body(response.text().await.unwrap_or_default());
return Err(format!("HTTP {status}: {body}"));
}
let resp: ModelsResponse = response
.json()
.await
.map_err(|e| format!("Failed to parse response: {e}"))?;
let mut models: Vec<FetchedModel> = resp
.data
.unwrap_or_default()
.into_iter()
.map(|m| FetchedModel {
id: m.id,
owned_by: m.owned_by,
})
.collect();
models.sort_by(|a, b| a.id.cmp(&b.id));
Ok(models)
Err(format!(
"All candidates failed: {}",
last_err.unwrap_or_else(|| "no candidates".to_string())
))
}
/// 构造 /v1/models 的完整 URL
fn build_models_url(base_url: &str, is_full_url: bool) -> Result<String, String> {
let trimmed = base_url.trim().trim_end_matches('/');
/// 构造「模型列表端点」的候选 URL 列表
///
/// 候选顺序:
/// 1. `models_url_override` 非空 → 只返回它
/// 2. baseURL 直接拼 `/v1/models`(若已有 `/v1` 结尾则拼 `/models`)
/// 3. 若 baseURL 命中 [`KNOWN_COMPAT_SUFFIXES`],剥离后缀再拼 `/v1/models`
/// 4. 同上,但拼 `/models`(部分站点如 DeepSeek 官方只暴露 `/models`)
///
/// 结果已去重且保持首次出现顺序。
pub fn build_models_url_candidates(
base_url: &str,
is_full_url: bool,
models_url_override: Option<&str>,
) -> Result<Vec<String>, String> {
if let Some(raw) = models_url_override {
let trimmed = raw.trim();
if !trimmed.is_empty() {
return Ok(vec![trimmed.to_string()]);
}
}
let trimmed = base_url.trim().trim_end_matches('/');
if trimmed.is_empty() {
return Err("Base URL is empty".to_string());
}
let mut candidates: Vec<String> = Vec::new();
if is_full_url {
// 尝试从完整端点 URL 推导 API 根路径
// 例如: https://proxy.example.com/v1/chat/completions → https://proxy.example.com/v1/models
if let Some(idx) = trimmed.find("/v1/") {
return Ok(format!("{}/v1/models", &trimmed[..idx]));
}
// 如果没有 /v1/ 路径,直接去掉最后一段路径
if let Some(idx) = trimmed.rfind('/') {
candidates.push(format!("{}/v1/models", &trimmed[..idx]));
} else if let Some(idx) = trimmed.rfind('/') {
let root = &trimmed[..idx];
if root.contains("://") && root.len() > root.find("://").unwrap() + 3 {
return Ok(format!("{root}/v1/models"));
candidates.push(format!("{root}/v1/models"));
}
}
return Err("Cannot derive models endpoint from full URL".to_string());
if candidates.is_empty() {
return Err("Cannot derive models endpoint from full URL".to_string());
}
return Ok(candidates);
}
// 常规情况: base_url 是 API 根路径
// 如果已经包含 /v1 路径,直接追加 /models
if trimmed.ends_with("/v1") {
return Ok(format!("{trimmed}/models"));
let primary = if trimmed.ends_with("/v1") {
format!("{trimmed}/models")
} else {
format!("{trimmed}/v1/models")
};
candidates.push(primary);
if let Some(stripped) = strip_compat_suffix(trimmed) {
let root = stripped.trim_end_matches('/');
if !root.is_empty() && root.contains("://") {
candidates.push(format!("{root}/v1/models"));
candidates.push(format!("{root}/models"));
}
}
Ok(format!("{trimmed}/v1/models"))
// 候选最多 3 条,线性去重即可,不值得上 HashSet。
let mut unique: Vec<String> = Vec::with_capacity(candidates.len());
for url in candidates {
if !unique.iter().any(|u| u == &url) {
unique.push(url);
}
}
Ok(unique)
}
/// Truncate a response body to [`ERROR_BODY_MAX_CHARS`] characters so huge
/// HTML 404 pages do not dominate the error string.
///
/// A truncated body is suffixed with "…" to signal that content was cut
/// (the original no-op `push_str("")` left truncation invisible — the
/// ellipsis literal was evidently lost).
fn truncate_body(body: String) -> String {
    if body.chars().count() <= ERROR_BODY_MAX_CHARS {
        body
    } else {
        let mut truncated: String = body.chars().take(ERROR_BODY_MAX_CHARS).collect();
        truncated.push('…');
        truncated
    }
}
/// If `base_url` ends with any known Anthropic-compatibility subpath, return
/// the portion that precedes it; otherwise `None`.
///
/// Relies on [`KNOWN_COMPAT_SUFFIXES`] being sorted longest-first, so the
/// longest suffix is matched before any shorter one it contains (otherwise
/// `/anthropic` would shadow the `/api/anthropic` case).
fn strip_compat_suffix(base_url: &str) -> Option<&str> {
    KNOWN_COMPAT_SUFFIXES
        .iter()
        .find_map(|suffix| base_url.strip_suffix(*suffix))
}
#[cfg(test)]
@@ -114,40 +215,174 @@ mod tests {
use super::*;
#[test]
fn test_build_models_url_basic() {
fn test_candidates_plain_root() {
let c = build_models_url_candidates("https://api.siliconflow.cn", false, None).unwrap();
assert_eq!(c, vec!["https://api.siliconflow.cn/v1/models"]);
}
#[test]
fn test_candidates_trailing_slash() {
    // A trailing slash on the baseURL must not produce a double slash.
    let candidates =
        build_models_url_candidates("https://api.example.com/", false, None).unwrap();
    assert_eq!(candidates, vec!["https://api.example.com/v1/models"]);
}
#[test]
fn test_candidates_with_v1() {
    // A baseURL already ending in /v1 only gets /models appended.
    let candidates =
        build_models_url_candidates("https://api.example.com/v1", false, None).unwrap();
    assert_eq!(candidates, vec!["https://api.example.com/v1/models"]);
}
#[test]
fn test_candidates_full_url() {
    // Full endpoint URL: the API root is derived from the /v1/ segment.
    let full = "https://proxy.example.com/v1/chat/completions";
    let candidates = build_models_url_candidates(full, true, None).unwrap();
    assert_eq!(candidates, vec!["https://proxy.example.com/v1/models"]);
}
#[test]
fn test_candidates_empty() {
    // An empty baseURL cannot yield any candidate endpoint.
    assert!(build_models_url_candidates("", false, None).is_err());
}
#[test]
fn test_candidates_override_returns_single() {
    // A non-empty override short-circuits candidate generation entirely.
    let candidates = build_models_url_candidates(
        "https://api.deepseek.com/anthropic",
        false,
        Some("https://api.deepseek.com/models"),
    )
    .unwrap();
    assert_eq!(candidates, vec!["https://api.deepseek.com/models"]);
}
#[test]
fn test_candidates_override_empty_falls_through() {
    // A whitespace-only override is ignored; normal derivation applies.
    let candidates =
        build_models_url_candidates("https://api.siliconflow.cn", false, Some(" ")).unwrap();
    assert_eq!(candidates, vec!["https://api.siliconflow.cn/v1/models"]);
}
#[test]
fn test_candidates_deepseek_strip_anthropic() {
let c =
build_models_url_candidates("https://api.deepseek.com/anthropic", false, None).unwrap();
assert_eq!(
build_models_url("https://api.siliconflow.cn", false).unwrap(),
"https://api.siliconflow.cn/v1/models"
c,
vec![
"https://api.deepseek.com/anthropic/v1/models",
"https://api.deepseek.com/v1/models",
"https://api.deepseek.com/models",
]
);
}
#[test]
fn test_build_models_url_trailing_slash() {
fn test_candidates_zhipu_strip_api_anthropic() {
let c = build_models_url_candidates("https://open.bigmodel.cn/api/anthropic", false, None)
.unwrap();
assert_eq!(
build_models_url("https://api.example.com/", false).unwrap(),
"https://api.example.com/v1/models"
c,
vec![
"https://open.bigmodel.cn/api/anthropic/v1/models",
"https://open.bigmodel.cn/v1/models",
"https://open.bigmodel.cn/models",
]
);
}
#[test]
fn test_build_models_url_with_v1() {
fn test_candidates_bailian_strip_apps_anthropic() {
let c = build_models_url_candidates(
"https://dashscope.aliyuncs.com/apps/anthropic",
false,
None,
)
.unwrap();
assert_eq!(
build_models_url("https://api.example.com/v1", false).unwrap(),
"https://api.example.com/v1/models"
c,
vec![
"https://dashscope.aliyuncs.com/apps/anthropic/v1/models",
"https://dashscope.aliyuncs.com/v1/models",
"https://dashscope.aliyuncs.com/models",
]
);
}
#[test]
fn test_build_models_url_full_url() {
fn test_candidates_stepfun_strip_step_plan() {
let c =
build_models_url_candidates("https://api.stepfun.com/step_plan", false, None).unwrap();
assert_eq!(
build_models_url("https://proxy.example.com/v1/chat/completions", true).unwrap(),
"https://proxy.example.com/v1/models"
c,
vec![
"https://api.stepfun.com/step_plan/v1/models",
"https://api.stepfun.com/v1/models",
"https://api.stepfun.com/models",
]
);
}
#[test]
fn test_build_models_url_empty() {
assert!(build_models_url("", false).is_err());
fn test_candidates_doubao_strip_api_coding() {
let c = build_models_url_candidates(
"https://ark.cn-beijing.volces.com/api/coding",
false,
None,
)
.unwrap();
assert_eq!(
c,
vec![
"https://ark.cn-beijing.volces.com/api/coding/v1/models",
"https://ark.cn-beijing.volces.com/v1/models",
"https://ark.cn-beijing.volces.com/models",
]
);
}
#[test]
fn test_candidates_rightcode_strip_claude() {
    let candidates =
        build_models_url_candidates("https://www.right.codes/claude", false, None).unwrap();
    let expected = vec![
        "https://www.right.codes/claude/v1/models",
        "https://www.right.codes/v1/models",
        "https://www.right.codes/models",
    ];
    assert_eq!(candidates, expected);
}
#[test]
fn test_candidates_longer_suffix_wins() {
    // Stripping must remove the whole /api/anthropic suffix, not just the
    // shorter /anthropic (which would leave a bogus https://.../api root).
    let candidates =
        build_models_url_candidates("https://api.z.ai/api/anthropic", false, None).unwrap();
    let expected = vec![
        "https://api.z.ai/api/anthropic/v1/models",
        "https://api.z.ai/v1/models",
        "https://api.z.ai/models",
    ];
    assert_eq!(candidates, expected);
}
#[test]
fn test_candidates_no_suffix_no_strip() {
    // "/api" is not a known compat suffix, so no stripped variants appear.
    let candidates =
        build_models_url_candidates("https://openrouter.ai/api", false, None).unwrap();
    assert_eq!(candidates, vec!["https://openrouter.ai/api/v1/models"]);
}
#[test]
fn test_candidates_deduplicate() {
    // Bare scheme://host has no subpath to strip: a single deduped candidate.
    let candidates =
        build_models_url_candidates("https://host.example.com", false, None).unwrap();
    assert_eq!(candidates.len(), 1);
}
#[test]

View File

@@ -50,7 +50,10 @@ import type {
ClaudeApiFormat,
ClaudeApiKeyField,
} from "@/types";
import type { TemplateValueConfig } from "@/config/claudeProviderPresets";
import {
providerPresets,
type TemplateValueConfig,
} from "@/config/claudeProviderPresets";
interface EndpointCandidate {
url: string;
@@ -212,8 +215,16 @@ export function ClaudeFormFields({
});
return;
}
// 当 baseURL 仍是某预设的默认值时,优先使用预设上的 modelsUrl 覆写
// 避免多走一次失败的候选请求(如 DeepSeek 把 /models 挂在根,而不是 /anthropic 子路径下)
const matchedPreset = providerPresets.find((p) => {
const env = (p.settingsConfig as { env?: Record<string, string> })?.env;
return env?.ANTHROPIC_BASE_URL === baseUrl;
});
const modelsUrl = matchedPreset?.modelsUrl;
setIsFetchingModels(true);
fetchModelsForConfig(baseUrl, apiKey, isFullUrl)
fetchModelsForConfig(baseUrl, apiKey, isFullUrl, modelsUrl)
.then((models) => {
setFetchedModels(models);
if (models.length === 0) {

View File

@@ -66,6 +66,10 @@ export interface ProviderPreset {
// 是否在 UI 中隐藏该预设(预设仍存在,仅不在列表中显示)
hidden?: boolean;
// 获取模型列表使用的完整 URL覆写自动候选逻辑
// 缺省时后端基于 baseURL 自动尝试 /v1/models、/models 以及剥离已知兼容子路径后的变体。
modelsUrl?: string;
}
export const providerPresets: ProviderPreset[] = [
@@ -136,6 +140,8 @@ export const providerPresets: ProviderPreset[] = [
},
},
category: "cn_official",
// Anthropic 兼容层挂在 /anthropic 子路径;/models 是根上独立端点
modelsUrl: "https://api.deepseek.com/models",
icon: "deepseek",
iconColor: "#1E88E5",
},

View File

@@ -874,6 +874,7 @@
"fetchModelsNeedConfig": "Please fill in API endpoint and API Key first",
"fetchModelsAuthFailed": "API Key is invalid or lacks permission",
"fetchModelsNotSupported": "This provider does not support fetching model list",
"fetchModelsEndpointNotFound": "No reachable models endpoint found. Please check the Base URL or confirm whether the provider exposes this API.",
"fetchModelsTimeout": "Request timed out, please check network connection"
},
"copilot": {

View File

@@ -874,6 +874,7 @@
"fetchModelsNeedConfig": "先に API エンドポイントと API Key を入力してください",
"fetchModelsAuthFailed": "API Key が無効か、権限がありません",
"fetchModelsNotSupported": "このプロバイダーはモデル一覧の取得に対応していません",
"fetchModelsEndpointNotFound": "利用可能なモデル一覧エンドポイントが見つかりません。Base URL を確認するか、プロバイダーが該当 API を公開しているかご確認ください",
"fetchModelsTimeout": "リクエストがタイムアウトしました。ネットワーク接続を確認してください"
},
"copilot": {

View File

@@ -874,6 +874,7 @@
"fetchModelsNeedConfig": "请先填写 API 端点和 API Key",
"fetchModelsAuthFailed": "API Key 无效或无权限",
"fetchModelsNotSupported": "该供应商不支持获取模型列表",
"fetchModelsEndpointNotFound": "未找到可用的模型列表端点,请检查 Base URL 或确认供应商是否开放该接口",
"fetchModelsTimeout": "请求超时,请检查网络连接"
},
"copilot": {

View File

@@ -10,15 +10,21 @@ export interface FetchedModel {
/**
* 从供应商获取可用模型列表
*
* 使用 OpenAI 兼容的 GET /v1/models 端点。
* 主要面向第三方聚合站(硅基流动、OpenRouter 等)。
* 使用 OpenAI 兼容的 GET /v1/models 端点。优先用 `modelsUrl` 精确覆写;
* 否则后端会对 baseURL 生成候选列表并按序尝试(含"剥离 /anthropic 等兼容子路径"兜底)。
*/
export async function fetchModelsForConfig(
baseUrl: string,
apiKey: string,
isFullUrl?: boolean,
modelsUrl?: string,
): Promise<FetchedModel[]> {
return invoke("fetch_models_for_config", { baseUrl, apiKey, isFullUrl });
return invoke("fetch_models_for_config", {
baseUrl,
apiKey,
isFullUrl,
modelsUrl,
});
}
/**
@@ -50,8 +56,13 @@ export function showFetchModelsError(
toast.error(t("providerForm.fetchModelsAuthFailed"));
return;
}
// 所有候选端点均返回 404/405供应商可能未开放 /models 接口,或 Base URL 有误
if (msg.includes("All candidates failed")) {
toast.error(t("providerForm.fetchModelsEndpointNotFound"));
return;
}
if (msg.includes("HTTP 404") || msg.includes("HTTP 405")) {
toast.error(t("providerForm.fetchModelsNotSupported"));
toast.error(t("providerForm.fetchModelsEndpointNotFound"));
return;
}
if (msg.includes("timeout") || msg.includes("timed out")) {