🐛 fix(ai/provider/chat-ui): 修复千问 Coding Plan 预设与 Claude CLI 报错

- 统一千问 Coding Plan 到 claude-cli 链路
- 修正旧配置识别与模型列表逻辑
- 透传 Claude CLI 鉴权失败和错误事件
- 移除误杀正常回复的启动定时器
This commit is contained in:
Syngnat
2026-03-27 17:02:51 +08:00
parent 4f74c44147
commit 09aa526570
10 changed files with 765 additions and 62 deletions

View File

@@ -11,6 +11,7 @@ import {
QWEN_CODING_PLAN_MODELS,
resolvePresetBaseURL,
resolvePresetModelSelection,
resolvePresetTransport,
} from '../utils/aiProviderPresets';
import {
PROVIDER_PRESET_CARD_BASE_STYLE,
@@ -37,6 +38,7 @@ interface ProviderPreset {
desc: string;
color: string;
backendType: AIProviderType;
fixedApiFormat?: string;
defaultBaseUrl: string;
defaultModel: string;
models: string[];
@@ -46,7 +48,7 @@ const PROVIDER_PRESETS: ProviderPreset[] = [
{ key: 'openai', label: 'OpenAI', icon: <ApiOutlined />, desc: 'GPT-5.4 / 5.3 系列', color: '#10b981', backendType: 'openai', defaultBaseUrl: 'https://api.openai.com/v1', defaultModel: 'gpt-4o', models: [] },
{ key: 'deepseek', label: 'DeepSeek', icon: <ThunderboltOutlined />, desc: 'DeepSeek-V4 / R1', color: '#3b82f6', backendType: 'openai', defaultBaseUrl: 'https://api.deepseek.com/v1', defaultModel: 'deepseek-chat', models: [] },
{ key: 'qwen-bailian', label: '通义千问(百炼通用)', icon: <CloudOutlined />, desc: '百炼 Anthropic 兼容 / 模型从远端拉取', color: '#6366f1', backendType: 'anthropic', defaultBaseUrl: QWEN_BAILIAN_ANTHROPIC_BASE_URL, defaultModel: '', models: [] },
{ key: 'qwen-coding-plan', label: '通义千问Coding Plan', icon: <CloudOutlined />, desc: 'Coding Plan 专属入口 / 使用官方支持模型清单', color: '#4f46e5', backendType: 'anthropic', defaultBaseUrl: QWEN_CODING_PLAN_ANTHROPIC_BASE_URL, defaultModel: '', models: QWEN_CODING_PLAN_MODELS },
{ key: 'qwen-coding-plan', label: '通义千问Coding Plan', icon: <CloudOutlined />, desc: 'Claude Code CLI 代理链路 / 使用官方支持模型清单', color: '#4f46e5', backendType: 'custom', fixedApiFormat: 'claude-cli', defaultBaseUrl: QWEN_CODING_PLAN_ANTHROPIC_BASE_URL, defaultModel: '', models: QWEN_CODING_PLAN_MODELS },
{ key: 'zhipu', label: '智谱 GLM', icon: <ExperimentOutlined />, desc: 'GLM-5 / GLM-5-Turbo', color: '#0ea5e9', backendType: 'openai', defaultBaseUrl: 'https://open.bigmodel.cn/api/paas/v4', defaultModel: 'glm-4', models: [] },
{ key: 'moonshot', label: 'Kimi', icon: <ExperimentOutlined />, desc: 'Kimi K2.5 (Anthropic 兼容)', color: '#0d9488', backendType: 'anthropic', defaultBaseUrl: 'https://api.moonshot.cn/anthropic', defaultModel: 'moonshot-v1-8k', models: [] },
{ key: 'anthropic', label: 'Claude', icon: <ExperimentOutlined />, desc: 'Claude Opus/Sonnet', color: '#d97706', backendType: 'anthropic', defaultBaseUrl: 'https://api.anthropic.com', defaultModel: 'claude-3-5-sonnet-20241022', models: [] },
@@ -167,11 +169,22 @@ const AISettingsModal: React.FC<AISettingsModalProps> = ({ open, onClose, darkMo
const handleEditProvider = (p: AIProviderConfig) => {
// 尝试根据 baseUrl 和 type 推断 preset
const matchedPreset = matchProviderPreset(p);
const resolvedTransport = resolvePresetTransport({
presetBackendType: matchedPreset.backendType,
presetFixedApiFormat: matchedPreset.fixedApiFormat,
valuesApiFormat: p.apiFormat,
});
setEditingProvider(p);
setIsEditing(true);
setTestStatus('idle');
form.resetFields();
form.setFieldsValue({ ...p, type: matchedPreset.backendType, models: p.models || [], presetKey: matchedPreset.key, apiFormat: p.apiFormat || 'openai' });
form.setFieldsValue({
...p,
type: resolvedTransport.type,
models: p.models || [],
presetKey: matchedPreset.key,
apiFormat: resolvedTransport.apiFormat || p.apiFormat || 'openai',
});
};
const handleDeleteProvider = async (id: string) => {
@@ -220,15 +233,21 @@ const AISettingsModal: React.FC<AISettingsModalProps> = ({ open, onClose, darkMo
presetDefaultBaseUrl: preset.defaultBaseUrl,
valuesBaseUrl: values.baseUrl,
});
const resolvedTransport = resolvePresetTransport({
presetBackendType: preset.backendType,
presetFixedApiFormat: preset.fixedApiFormat,
valuesApiFormat: values.apiFormat,
});
const payload = {
...editingProvider,
...values,
...resolvedTransport,
name: finalName,
model: finalModel,
models: resolvedModels,
baseUrl: finalBaseUrl,
apiFormat: values.apiFormat || 'openai',
apiFormat: resolvedTransport.apiFormat,
};
// 后端 AISaveProvider 统一处理新增和更新,返回 void失败抛异常
await Service?.AISaveProvider?.(payload);
@@ -277,7 +296,29 @@ const AISettingsModal: React.FC<AISettingsModalProps> = ({ open, onClose, darkMo
presetDefaultBaseUrl: preset.defaultBaseUrl,
valuesBaseUrl: values.baseUrl,
});
const res = await Service?.AITestProvider?.({ ...values, baseUrl: finalBaseUrl, maxTokens: Number(values.maxTokens) || 4096, temperature: Number(values.temperature) ?? 0.7 });
const { model: finalModel, models: resolvedModels } = resolvePresetModelSelection({
presetKey: values.presetKey || 'openai',
presetDefaultModel: preset.defaultModel,
presetModels: preset.models,
valuesModel: values.model,
customModels: values.models,
});
const resolvedTransport = resolvePresetTransport({
presetBackendType: preset.backendType,
presetFixedApiFormat: preset.fixedApiFormat,
valuesApiFormat: values.apiFormat,
});
const res = await Service?.AITestProvider?.({
...editingProvider,
...values,
...resolvedTransport,
baseUrl: finalBaseUrl,
model: finalModel,
models: resolvedModels,
maxTokens: Number(values.maxTokens) || 4096,
temperature: Number(values.temperature) ?? 0.7,
apiFormat: resolvedTransport.apiFormat,
});
if (res?.success) { setTestStatus('success'); void messageApi.success('连接成功'); }
else { setTestStatus('error'); void messageApi.error(`测试失败: ${res?.message || '未知错误'}`); }
} catch (e: any) { setTestStatus('error'); void messageApi.error(e?.message || '测试失败'); }
@@ -286,9 +327,15 @@ const AISettingsModal: React.FC<AISettingsModalProps> = ({ open, onClose, darkMo
const handlePresetChange = (presetKey: string) => {
const preset = findPreset(presetKey);
const resolvedTransport = resolvePresetTransport({
presetBackendType: preset.backendType,
presetFixedApiFormat: preset.fixedApiFormat,
valuesApiFormat: form.getFieldValue('apiFormat'),
});
form.setFieldsValue({
presetKey,
type: preset.backendType,
type: resolvedTransport.type,
apiFormat: resolvedTransport.apiFormat || 'openai',
baseUrl: preset.defaultBaseUrl,
model: preset.defaultModel,
});

View File

@@ -204,7 +204,7 @@ export interface AIProviderConfig {
baseUrl: string;
model: string;
models?: string[];
apiFormat?: string; // custom 专用: openai | anthropic | gemini
apiFormat?: string; // custom 专用: openai | anthropic | gemini | claude-cli
headers?: Record<string, string>;
maxTokens: number;
temperature: number;
@@ -243,4 +243,3 @@ export interface AISafetyResult {
requiresConfirm: boolean;
warningMessage?: string;
}

View File

@@ -7,6 +7,7 @@ import {
QWEN_CODING_PLAN_MODELS,
resolvePresetBaseURL,
resolvePresetModelSelection,
resolvePresetTransport,
} from './aiProviderPresets';
describe('ai provider preset helpers', () => {
@@ -24,6 +25,27 @@ describe('ai provider preset helpers', () => {
})).toBe('qwen-coding-plan');
});
it('maps Coding Plan Claude CLI config back to the dedicated Coding Plan preset', () => {
expect(matchQwenPresetKey({
type: 'custom',
apiFormat: 'claude-cli',
baseUrl: QWEN_CODING_PLAN_ANTHROPIC_BASE_URL,
})).toBe('qwen-coding-plan');
});
it('does not keep a baked-in model list for the Coding Plan preset', () => {
expect(QWEN_CODING_PLAN_MODELS).toEqual([
'qwen3.5-plus',
'kimi-k2.5',
'glm-5',
'MiniMax-M2.5',
'qwen3-max-2026-01-23',
'qwen3-coder-next',
'qwen3-coder-plus',
'glm-4.7',
]);
});
it('keeps built-in preset model empty when the preset intentionally requires an explicit selection', () => {
expect(resolvePresetModelSelection({
presetKey: 'qwen-coding-plan',
@@ -65,4 +87,25 @@ describe('ai provider preset helpers', () => {
valuesBaseUrl: 'https://example-proxy.internal/v1',
})).toBe('https://example-proxy.internal/v1');
});
it('forces qwen coding plan to save as custom plus claude-cli', () => {
expect(resolvePresetTransport({
presetBackendType: 'custom',
presetFixedApiFormat: 'claude-cli',
valuesApiFormat: 'anthropic',
})).toEqual({
type: 'custom',
apiFormat: 'claude-cli',
});
});
it('keeps custom preset transport editable', () => {
expect(resolvePresetTransport({
presetBackendType: 'custom',
valuesApiFormat: 'gemini',
})).toEqual({
type: 'custom',
apiFormat: 'gemini',
});
});
});

View File

@@ -1,4 +1,4 @@
import type { AIProviderConfig } from '../types';
import type { AIProviderConfig, AIProviderType } from '../types';
export const LEGACY_QWEN_BAILIAN_OPENAI_BASE_URL = 'https://dashscope.aliyuncs.com/compatible-mode/v1';
export const LEGACY_QWEN_CODING_PLAN_OPENAI_BASE_URL = 'https://coding.dashscope.aliyuncs.com/v1';
@@ -7,12 +7,14 @@ export const QWEN_CODING_PLAN_ANTHROPIC_BASE_URL = 'https://coding.dashscope.ali
export const QWEN_BAILIAN_MODELS_BASE_URL = LEGACY_QWEN_BAILIAN_OPENAI_BASE_URL;
export const QWEN_CODING_PLAN_MODELS = [
'qwen3.5-plus',
'kimi-k2.5',
'glm-5',
'MiniMax-M2.5',
'qwen3-max-2026-01-23',
'qwen3-coder-next',
'qwen3-coder-plus',
'qwen3-coder-480b-a35b-instruct',
'qwen3-coder-30b-a3b-instruct',
'qwen3-coder-flash',
'qwen-plus',
'qwen-turbo',
'glm-4.7',
];
const CUSTOM_LIKE_PRESET_KEYS = new Set(['custom', 'ollama']);
@@ -36,6 +38,17 @@ export interface ResolvePresetBaseURLInput {
valuesBaseUrl?: string;
}
// Input for resolvePresetTransport: the preset's backend type, an optional
// API format pinned by the preset (e.g. the Coding Plan preset forces
// "claude-cli"), and the apiFormat currently selected in the form, if any.
export interface ResolvePresetTransportInput {
presetBackendType: AIProviderType;
presetFixedApiFormat?: string;
valuesApiFormat?: string;
}
// Resolved transport to persist: the provider type plus the effective
// apiFormat (undefined for built-in backends with no pinned format).
export interface ResolvePresetTransportResult {
type: AIProviderType;
apiFormat?: string;
}
export const getProviderHostname = (raw?: string): string => {
if (!raw) return '';
try {
@@ -56,7 +69,7 @@ export const getProviderFingerprint = (raw?: string): string => {
}
};
export const matchQwenPresetKey = (provider: Pick<AIProviderConfig, 'type' | 'baseUrl'>): string | null => {
export const matchQwenPresetKey = (provider: Pick<AIProviderConfig, 'type' | 'baseUrl' | 'apiFormat'>): string | null => {
const fingerprint = getProviderFingerprint(provider.baseUrl);
const bailianFingerprints = new Set([
getProviderFingerprint(LEGACY_QWEN_BAILIAN_OPENAI_BASE_URL),
@@ -103,3 +116,28 @@ export const resolvePresetBaseURL = ({
}
return presetDefaultBaseUrl;
};
/**
 * Resolve which transport (provider type + API format) a provider should be
 * saved with.
 *
 * Rules, in priority order:
 * 1. A preset-pinned apiFormat always wins (e.g. a preset hard-wired to the
 *    Claude CLI), regardless of what the form currently holds.
 * 2. Non-"custom" backends carry no apiFormat at all.
 * 3. A "custom" backend keeps whatever apiFormat the user picked, falling
 *    back to "openai" when nothing was chosen.
 */
export const resolvePresetTransport = (
  input: ResolvePresetTransportInput,
): ResolvePresetTransportResult => {
  const { presetBackendType: type, presetFixedApiFormat, valuesApiFormat } = input;
  if (presetFixedApiFormat) {
    return { type, apiFormat: presetFixedApiFormat };
  }
  if (type !== 'custom') {
    return { type, apiFormat: undefined };
  }
  return { type, apiFormat: valuesApiFormat || 'openai' };
};

View File

@@ -5,16 +5,20 @@ import (
"bytes"
"context"
"encoding/json"
"errors"
"fmt"
"os"
"os/exec"
"runtime"
"strings"
"time"
ai "GoNavi-Wails/internal/ai"
)
var claudeLookPath = exec.LookPath
var claudeCommandContext = exec.CommandContext
var claudeCLIRequestTimeout = 90 * time.Second
// ClaudeCLIProvider 通过 Claude Code CLI 发送聊天请求
// 适用于 anyrouter/newapi 等只支持 Claude Code 协议的代理服务
@@ -48,19 +52,25 @@ func (p *ClaudeCLIProvider) Chat(ctx context.Context, req ai.ChatRequest) (*ai.C
return nil, err
}
ctx, cancel := ensureClaudeCLITimeout(ctx, claudeCLIRequestTimeout)
defer cancel()
prompt := buildPrompt(req.Messages)
args := []string{"-p", prompt, "--output-format", "json", "--no-session-persistence"}
if p.config.Model != "" {
args = append(args, "--model", p.config.Model)
}
cmd := exec.CommandContext(ctx, "claude", args...)
cmd := claudeCommandContext(ctx, "claude", args...)
if err := p.setEnv(cmd); err != nil {
return nil, err
}
output, err := cmd.Output()
if err != nil {
if isClaudeCLITimeout(ctx, err) {
return nil, fmt.Errorf("claude CLI 执行超时(%s当前 Base URL 或 API Key 可能没有返回有效响应", claudeCLIRequestTimeout)
}
if exitErr, ok := err.(*exec.ExitError); ok {
return nil, fmt.Errorf("claude CLI 执行失败: %s", string(exitErr.Stderr))
}
@@ -68,13 +78,14 @@ func (p *ClaudeCLIProvider) Chat(ctx context.Context, req ai.ChatRequest) (*ai.C
}
// 解析 JSON 输出
var result struct {
Result string `json:"result"`
}
var result cliStreamEvent
if err := json.Unmarshal(output, &result); err != nil {
// 如果 JSON 解析失败,直接返回原始文本
return &ai.ChatResponse{Content: strings.TrimSpace(string(output))}, nil
}
if errMsg, hasError := extractClaudeCLIEventError(result); hasError {
return nil, fmt.Errorf("claude CLI 返回错误: %s", errMsg)
}
return &ai.ChatResponse{Content: result.Result}, nil
}
@@ -85,6 +96,9 @@ func (p *ClaudeCLIProvider) ChatStream(ctx context.Context, req ai.ChatRequest,
return err
}
ctx, cancel := ensureClaudeCLITimeout(ctx, claudeCLIRequestTimeout)
defer cancel()
prompt := buildPrompt(req.Messages)
args := []string{"-p", prompt, "--output-format", "stream-json", "--verbose", "--include-partial-messages", "--no-session-persistence"}
if p.config.Model != "" {
@@ -93,7 +107,7 @@ func (p *ClaudeCLIProvider) ChatStream(ctx context.Context, req ai.ChatRequest,
fmt.Printf("[ClaudeCLI DEBUG] Running: claude %v\n", args)
cmd := exec.CommandContext(ctx, "claude", args...)
cmd := claudeCommandContext(ctx, "claude", args...)
if err := p.setEnv(cmd); err != nil {
return err
}
@@ -137,7 +151,23 @@ func (p *ClaudeCLIProvider) ChatStream(ctx context.Context, req ai.ChatRequest,
}
switch event.Type {
case "system":
if isClaudeCLISystemRetryEvent(event) {
if errMsg, hasError := extractClaudeCLISystemRetryError(event); hasError {
callback(ai.StreamChunk{Error: errMsg, Done: true})
if cmd.Process != nil {
_ = cmd.Process.Kill()
}
_ = cmd.Wait()
return nil
}
}
case "assistant":
if errMsg, hasError := extractClaudeCLIEventError(event); hasError {
callback(ai.StreamChunk{Error: errMsg, Done: true})
_ = cmd.Wait()
return nil
}
// 助手消息开始或文本内容
if event.Message.Content != nil {
for _, block := range event.Message.Content {
@@ -156,12 +186,18 @@ func (p *ClaudeCLIProvider) ChatStream(ctx context.Context, req ai.ChatRequest,
callback(ai.StreamChunk{Content: event.Delta.Text})
}
case "result":
if errMsg, hasError := extractClaudeCLIEventError(event); hasError {
callback(ai.StreamChunk{Error: errMsg, Done: true})
_ = cmd.Wait()
return nil
}
// 最终结果事件 — 不发送 contentassistant 事件已包含),只标记完成
callback(ai.StreamChunk{Done: true})
_ = cmd.Wait()
return nil
case "error":
callback(ai.StreamChunk{Error: event.Error.Message, Done: true})
errMsg, _ := extractClaudeCLIEventError(event)
callback(ai.StreamChunk{Error: errMsg, Done: true})
_ = cmd.Wait()
return nil
}
@@ -171,6 +207,14 @@ func (p *ClaudeCLIProvider) ChatStream(ctx context.Context, req ai.ChatRequest,
stderrStr := strings.TrimSpace(stderrBuf.String())
fmt.Printf("[ClaudeCLI DEBUG] Process exited. stderr: %s\n", stderrStr)
if isClaudeCLITimeout(ctx, waitErr) {
callback(ai.StreamChunk{
Error: fmt.Sprintf("claude CLI 执行超时(%s当前 Base URL 或 API Key 可能没有返回有效响应", claudeCLIRequestTimeout),
Done: true,
})
return nil
}
if waitErr != nil {
errMsg := fmt.Sprintf("claude CLI 异常退出: %v", waitErr)
if stderrStr != "" {
@@ -184,6 +228,20 @@ func (p *ClaudeCLIProvider) ChatStream(ctx context.Context, req ai.ChatRequest,
return nil
}
func ensureClaudeCLITimeout(ctx context.Context, timeout time.Duration) (context.Context, context.CancelFunc) {
if _, hasDeadline := ctx.Deadline(); hasDeadline || timeout <= 0 {
return ctx, func() {}
}
return context.WithTimeout(ctx, timeout)
}
func isClaudeCLITimeout(ctx context.Context, err error) bool {
if err == nil {
return false
}
return errors.Is(ctx.Err(), context.DeadlineExceeded) || errors.Is(err, context.DeadlineExceeded)
}
// setEnv 设置 Claude CLI 的环境变量
func (p *ClaudeCLIProvider) setEnv(cmd *exec.Cmd) error {
env, err := buildClaudeCLIEnv(p.config, cmd.Environ(), runtime.GOOS, claudeLookPath, fileExists)
@@ -200,6 +258,7 @@ func buildClaudeCLIEnv(config ai.ProviderConfig, baseEnv []string, goos string,
env = upsertEnv(env, "ANTHROPIC_BASE_URL", strings.TrimRight(config.BaseURL, "/"))
}
if config.APIKey != "" {
env = upsertEnv(env, "ANTHROPIC_AUTH_TOKEN", config.APIKey)
env = upsertEnv(env, "ANTHROPIC_API_KEY", config.APIKey)
}
@@ -354,8 +413,15 @@ func buildPrompt(messages []ai.Message) string {
// cliStreamEvent Claude CLI stream-json 输出的事件结构
type cliStreamEvent struct {
Type string `json:"type"`
Message struct {
Type string `json:"type"`
Subtype string `json:"subtype,omitempty"`
IsError bool `json:"is_error,omitempty"`
Attempt int `json:"attempt,omitempty"`
MaxRetries int `json:"max_retries,omitempty"`
RetryDelayMS float64 `json:"retry_delay_ms,omitempty"`
ErrorStatus int `json:"error_status,omitempty"`
SessionID string `json:"session_id,omitempty"`
Message struct {
Content []struct {
Type string `json:"type"`
Text string `json:"text"`
@@ -367,8 +433,79 @@ type cliStreamEvent struct {
Text string `json:"text"`
Thinking string `json:"thinking"`
} `json:"delta,omitempty"`
Result string `json:"result,omitempty"`
Error struct {
Message string `json:"message"`
} `json:"error,omitempty"`
Result string `json:"result,omitempty"`
Error cliStreamEventError `json:"error,omitempty"`
}
type cliStreamEventError struct {
Message string
}
func (e *cliStreamEventError) UnmarshalJSON(data []byte) error {
trimmed := strings.TrimSpace(string(data))
if trimmed == "" || trimmed == "null" {
e.Message = ""
return nil
}
var text string
if err := json.Unmarshal(data, &text); err == nil {
e.Message = strings.TrimSpace(text)
return nil
}
var payload struct {
Message string `json:"message"`
}
if err := json.Unmarshal(data, &payload); err != nil {
return err
}
e.Message = strings.TrimSpace(payload.Message)
return nil
}
// extractClaudeCLIEventError returns the human-readable error text carried by
// event when it represents a failure (type "error" or is_error=true).
// Lookup order: the result field, then the first non-empty text content
// block, then the structured error message; a generic fallback is used last.
func extractClaudeCLIEventError(event cliStreamEvent) (string, bool) {
	failed := event.Type == "error" || event.IsError
	if !failed {
		return "", false
	}
	if result := strings.TrimSpace(event.Result); result != "" {
		return result, true
	}
	for _, block := range event.Message.Content {
		if block.Type != "text" {
			continue
		}
		if text := strings.TrimSpace(block.Text); text != "" {
			return text, true
		}
	}
	if msg := strings.TrimSpace(event.Error.Message); msg != "" {
		return msg, true
	}
	return "claude CLI 返回未知错误", true
}
// isClaudeCLISystemRetryEvent reports whether event is the CLI's
// system/api_retry notification emitted before an automatic retry attempt.
func isClaudeCLISystemRetryEvent(event cliStreamEvent) bool {
	if event.Type != "system" {
		return false
	}
	return event.Subtype == "api_retry"
}
// extractClaudeCLISystemRetryError surfaces authentication failures hidden in
// the CLI's system/api_retry events (HTTP 401/403, or an explicit
// "authentication_failed" marker). Non-auth retry causes are left to the
// CLI's own retry loop and yield ("", false).
func extractClaudeCLISystemRetryError(event cliStreamEvent) (string, bool) {
	if !isClaudeCLISystemRetryEvent(event) {
		return "", false
	}
	errText := strings.TrimSpace(event.Error.Message)
	authByStatus := event.ErrorStatus == 401 || event.ErrorStatus == 403
	authByText := strings.EqualFold(errText, "authentication_failed")
	if !authByStatus && !authByText {
		return "", false
	}
	if errText == "" {
		errText = "authentication_failed"
	}
	if event.ErrorStatus > 0 {
		return fmt.Sprintf("claude CLI 鉴权失败 (HTTP %d): %s", event.ErrorStatus, errText), true
	}
	return fmt.Sprintf("claude CLI 鉴权失败: %s", errText), true
}

View File

@@ -1,9 +1,13 @@
package provider
import (
"context"
"errors"
"os"
"path/filepath"
"strings"
"testing"
"time"
"GoNavi-Wails/internal/ai"
)
@@ -26,6 +30,9 @@ func TestBuildClaudeCLIEnv_IncludesAnthropicProxyEnv(t *testing.T) {
if got := envValue(env, "ANTHROPIC_API_KEY"); got != "sk-test" {
t.Fatalf("expected api key in env, got %q", got)
}
if got := envValue(env, "ANTHROPIC_AUTH_TOKEN"); got != "sk-test" {
t.Fatalf("expected auth token in env, got %q", got)
}
}
func TestBuildClaudeCLIEnv_UsesDetectedGitBashOnWindows(t *testing.T) {
@@ -67,3 +74,281 @@ func TestBuildClaudeCLIEnv_ReturnsActionableErrorWhenGitBashMissingOnWindows(t *
t.Fatalf("expected env var hint, got %v", err)
}
}
// Chat must fail with the dedicated "执行超时" timeout message when the fake
// CLI never exits, and must wait for (at least) the configured deadline.
func TestClaudeCLIProvider_ChatTimesOutWhenCommandDoesNotFinish(t *testing.T) {
fakeClaude := writeFakeClaudeScript(t, "#!/bin/sh\nsleep 5\n")
restore := overrideClaudeCLIForTest(t, fakeClaude)
defer restore()
// Shrink the package-level request timeout for the duration of this test.
originalRequestTimeout := claudeCLIRequestTimeout
claudeCLIRequestTimeout = 200 * time.Millisecond
defer func() {
claudeCLIRequestTimeout = originalRequestTimeout
}()
provider, err := NewClaudeCLIProvider(ai.ProviderConfig{
BaseURL: "https://coding.dashscope.aliyuncs.com/apps/anthropic",
APIKey: "sk-test",
Model: "qwen3.5-plus",
})
if err != nil {
t.Fatalf("unexpected provider error: %v", err)
}
start := time.Now()
_, err = provider.Chat(context.Background(), ai.ChatRequest{
Messages: []ai.Message{{Role: "user", Content: "ping"}},
})
if err == nil {
t.Fatal("expected chat timeout error")
}
if !strings.Contains(err.Error(), "执行超时") {
t.Fatalf("expected timeout error, got %v", err)
}
if time.Since(start) < 200*time.Millisecond {
t.Fatalf("expected timeout path to wait for configured deadline, took %s", time.Since(start))
}
}
// ChatStream must report the timeout via a terminal error chunk (not a Go
// error) when the CLI emits only an init event and then hangs.
func TestClaudeCLIProvider_ChatStreamUsesRequestTimeoutWhenNoMeaningfulResponseArrives(t *testing.T) {
fakeClaude := writeFakeClaudeScript(t, "#!/bin/sh\necho '{\"type\":\"system\",\"subtype\":\"init\"}'\nexec sleep 5\n")
restore := overrideClaudeCLIForTest(t, fakeClaude)
defer restore()
originalRequestTimeout := claudeCLIRequestTimeout
claudeCLIRequestTimeout = 200 * time.Millisecond
defer func() {
claudeCLIRequestTimeout = originalRequestTimeout
}()
provider, err := NewClaudeCLIProvider(ai.ProviderConfig{
BaseURL: "https://coding.dashscope.aliyuncs.com/apps/anthropic",
APIKey: "sk-test",
Model: "qwen3.5-plus",
})
if err != nil {
t.Fatalf("unexpected provider error: %v", err)
}
var chunks []ai.StreamChunk
err = provider.ChatStream(context.Background(), ai.ChatRequest{
Messages: []ai.Message{{Role: "user", Content: "ping"}},
}, func(chunk ai.StreamChunk) {
chunks = append(chunks, chunk)
})
if err != nil {
t.Fatalf("expected stream provider to report timeout via callback, got %v", err)
}
if len(chunks) == 0 {
t.Fatal("expected timeout chunk")
}
lastChunk := chunks[len(chunks)-1]
if !lastChunk.Done {
t.Fatalf("expected timeout chunk to terminate stream, got %#v", lastChunk)
}
if !strings.Contains(lastChunk.Error, "执行超时") {
t.Fatalf("expected request timeout message, got %#v", lastChunk)
}
}
// A response that arrives after a short delay (but within the request
// timeout) must stream normally: content chunk first, Done chunk last.
// Guards against a startup timer that killed slow-but-healthy replies.
func TestClaudeCLIProvider_ChatStreamAllowsDelayedMeaningfulResponse(t *testing.T) {
fakeClaude := writeFakeClaudeScript(t, "#!/bin/sh\necho '{\"type\":\"system\",\"subtype\":\"init\"}'\nsleep 0.2\necho '{\"type\":\"assistant\",\"message\":{\"content\":[{\"type\":\"text\",\"text\":\"OK\"}]}}'\necho '{\"type\":\"result\",\"subtype\":\"success\",\"is_error\":false,\"result\":\"OK\"}'\n")
restore := overrideClaudeCLIForTest(t, fakeClaude)
defer restore()
originalRequestTimeout := claudeCLIRequestTimeout
claudeCLIRequestTimeout = 1 * time.Second
defer func() {
claudeCLIRequestTimeout = originalRequestTimeout
}()
provider, err := NewClaudeCLIProvider(ai.ProviderConfig{
BaseURL: "https://coding.dashscope.aliyuncs.com/apps/anthropic",
APIKey: "sk-test",
Model: "qwen3.5-plus",
})
if err != nil {
t.Fatalf("unexpected provider error: %v", err)
}
var chunks []ai.StreamChunk
err = provider.ChatStream(context.Background(), ai.ChatRequest{
Messages: []ai.Message{{Role: "user", Content: "ping"}},
}, func(chunk ai.StreamChunk) {
chunks = append(chunks, chunk)
})
if err != nil {
t.Fatalf("expected delayed response to complete via callback, got %v", err)
}
if len(chunks) == 0 {
t.Fatal("expected delayed response chunks")
}
if chunks[0].Content != "OK" {
t.Fatalf("expected delayed content chunk, got %#v", chunks)
}
if !chunks[len(chunks)-1].Done {
t.Fatalf("expected terminal done chunk, got %#v", chunks[len(chunks)-1])
}
}
// Chat must surface the upstream API error when the CLI exits successfully
// but its JSON result carries is_error=true.
func TestClaudeCLIProvider_ChatReturnsErrorWhenJSONResponseIsError(t *testing.T) {
fakeClaude := writeFakeClaudeScript(t, "#!/bin/sh\necho '{\"type\":\"result\",\"subtype\":\"success\",\"is_error\":true,\"result\":\"API Error: Unable to connect to API (ECONNRESET)\",\"error\":\"unknown\"}'\n")
restore := overrideClaudeCLIForTest(t, fakeClaude)
defer restore()
provider, err := NewClaudeCLIProvider(ai.ProviderConfig{
BaseURL: "https://coding.dashscope.aliyuncs.com/apps/anthropic",
APIKey: "sk-test",
Model: "qwen3.5-plus",
})
if err != nil {
t.Fatalf("unexpected provider error: %v", err)
}
_, err = provider.Chat(context.Background(), ai.ChatRequest{
Messages: []ai.Message{{Role: "user", Content: "ping"}},
})
if err == nil {
t.Fatal("expected chat error when CLI JSON marks request as failed")
}
if !strings.Contains(err.Error(), "Unable to connect to API") {
t.Fatalf("expected upstream API error, got %v", err)
}
}
// An assistant event flagged is_error=true must become a single terminal
// error chunk — its text must not leak into the content stream.
func TestClaudeCLIProvider_ChatStreamReportsAssistantErrorEvent(t *testing.T) {
fakeClaude := writeFakeClaudeScript(t, "#!/bin/sh\necho '{\"type\":\"assistant\",\"is_error\":true,\"message\":{\"content\":[{\"type\":\"text\",\"text\":\"API Error: Unable to connect to API (ECONNRESET)\"}]},\"error\":\"unknown\"}'\n")
restore := overrideClaudeCLIForTest(t, fakeClaude)
defer restore()
provider, err := NewClaudeCLIProvider(ai.ProviderConfig{
BaseURL: "https://coding.dashscope.aliyuncs.com/apps/anthropic",
APIKey: "sk-test",
Model: "qwen3.5-plus",
})
if err != nil {
t.Fatalf("unexpected provider error: %v", err)
}
var chunks []ai.StreamChunk
err = provider.ChatStream(context.Background(), ai.ChatRequest{
Messages: []ai.Message{{Role: "user", Content: "ping"}},
}, func(chunk ai.StreamChunk) {
chunks = append(chunks, chunk)
})
if err != nil {
t.Fatalf("expected stream provider to report error via callback, got %v", err)
}
if len(chunks) != 1 {
t.Fatalf("expected a single terminal error chunk, got %#v", chunks)
}
if chunks[0].Content != "" {
t.Fatalf("expected assistant error event to avoid content output, got %#v", chunks[0])
}
if !chunks[0].Done || !strings.Contains(chunks[0].Error, "Unable to connect to API") {
t.Fatalf("expected upstream API error chunk, got %#v", chunks[0])
}
}
// A result event flagged is_error=true must become a single terminal error
// chunk with the upstream message and no content output.
func TestClaudeCLIProvider_ChatStreamReportsResultErrorEvent(t *testing.T) {
fakeClaude := writeFakeClaudeScript(t, "#!/bin/sh\necho '{\"type\":\"result\",\"subtype\":\"success\",\"is_error\":true,\"result\":\"API Error: Unable to connect to API (ECONNRESET)\",\"error\":\"unknown\"}'\n")
restore := overrideClaudeCLIForTest(t, fakeClaude)
defer restore()
provider, err := NewClaudeCLIProvider(ai.ProviderConfig{
BaseURL: "https://coding.dashscope.aliyuncs.com/apps/anthropic",
APIKey: "sk-test",
Model: "qwen3.5-plus",
})
if err != nil {
t.Fatalf("unexpected provider error: %v", err)
}
var chunks []ai.StreamChunk
err = provider.ChatStream(context.Background(), ai.ChatRequest{
Messages: []ai.Message{{Role: "user", Content: "ping"}},
}, func(chunk ai.StreamChunk) {
chunks = append(chunks, chunk)
})
if err != nil {
t.Fatalf("expected stream provider to report error via callback, got %v", err)
}
if len(chunks) != 1 {
t.Fatalf("expected a single terminal error chunk, got %#v", chunks)
}
if chunks[0].Content != "" {
t.Fatalf("expected result error event to avoid content output, got %#v", chunks[0])
}
if !chunks[0].Done || !strings.Contains(chunks[0].Error, "Unable to connect to API") {
t.Fatalf("expected upstream API error chunk, got %#v", chunks[0])
}
}
// A system/api_retry event with error_status 401 must terminate the stream
// immediately with an auth-failure chunk instead of letting the CLI retry
// (the fake script would otherwise hang in `sleep 5`).
func TestClaudeCLIProvider_ChatStreamReportsApiRetryAuthenticationFailure(t *testing.T) {
fakeClaude := writeFakeClaudeScript(t, "#!/bin/sh\necho '{\"type\":\"system\",\"subtype\":\"api_retry\",\"attempt\":1,\"max_retries\":10,\"retry_delay_ms\":536.11,\"error_status\":401,\"error\":\"authentication_failed\",\"session_id\":\"retry-1\"}'\nexec sleep 5\n")
restore := overrideClaudeCLIForTest(t, fakeClaude)
defer restore()
provider, err := NewClaudeCLIProvider(ai.ProviderConfig{
BaseURL: "https://coding.dashscope.aliyuncs.com/apps/anthropic",
APIKey: "sk-test",
Model: "qwen3.5-plus",
})
if err != nil {
t.Fatalf("unexpected provider error: %v", err)
}
var chunks []ai.StreamChunk
err = provider.ChatStream(context.Background(), ai.ChatRequest{
Messages: []ai.Message{{Role: "user", Content: "ping"}},
}, func(chunk ai.StreamChunk) {
chunks = append(chunks, chunk)
})
if err != nil {
t.Fatalf("expected stream provider to report authentication error via callback, got %v", err)
}
if len(chunks) != 1 {
t.Fatalf("expected a single terminal error chunk, got %#v", chunks)
}
if !chunks[0].Done {
t.Fatalf("expected terminal error chunk, got %#v", chunks[0])
}
if strings.Contains(chunks[0].Error, "未收到模型响应") {
t.Fatalf("expected auth failure instead of startup timeout, got %#v", chunks[0])
}
if !strings.Contains(chunks[0].Error, "401") || !strings.Contains(chunks[0].Error, "authentication_failed") {
t.Fatalf("expected auth retry error details, got %#v", chunks[0])
}
}
// writeFakeClaudeScript materializes a stand-in `claude` executable with the
// given shell body in a per-test temp dir and returns its absolute path.
func writeFakeClaudeScript(t *testing.T, content string) string {
	t.Helper()
	scriptPath := filepath.Join(t.TempDir(), "claude")
	// 0o755 so the spawned process can actually execute the script.
	if err := os.WriteFile(scriptPath, []byte(content), 0o755); err != nil {
		t.Fatalf("failed to write fake claude script: %v", err)
	}
	return scriptPath
}
// overrideClaudeCLIForTest points the package-level claudeLookPath hook at a
// fake claude binary and prepends its directory to PATH (so the spawned
// process resolves the fake binary too); the returned func undoes both.
func overrideClaudeCLIForTest(t *testing.T, fakeClaudePath string) func() {
t.Helper()
originalLookPath := claudeLookPath
claudeLookPath = func(name string) (string, error) {
if name == "claude" {
return fakeClaudePath, nil
}
// Any other lookup falls through to the real implementation.
return originalLookPath(name)
}
originalPath := os.Getenv("PATH")
if err := os.Setenv("PATH", filepath.Dir(fakeClaudePath)+string(os.PathListSeparator)+originalPath); err != nil {
t.Fatalf("failed to override PATH: %v", err)
}
return func() {
claudeLookPath = originalLookPath
_ = os.Setenv("PATH", originalPath)
}
}

View File

@@ -47,16 +47,28 @@ var miniMaxAnthropicModels = []string{
}
var dashScopeCodingPlanModels = []string{
"qwen3.5-plus",
"kimi-k2.5",
"glm-5",
"MiniMax-M2.5",
"qwen3-max-2026-01-23",
"qwen3-coder-next",
"qwen3-coder-plus",
"qwen3-coder-480b-a35b-instruct",
"qwen3-coder-30b-a3b-instruct",
"qwen3-coder-flash",
"qwen-plus",
"qwen-turbo",
"glm-4.7",
}
const dashScopeCodingPlanAnthropicBaseURL = "https://coding.dashscope.aliyuncs.com/apps/anthropic"
var volcengineCodingPlanAllowedExactModels = []string{
"auto",
}
var volcengineCodingPlanAllowedModelFamilies = []string{
"doubao-seed-2.0-code",
"doubao-seed-2.0-pro",
"doubao-seed-2.0-lite",
"doubao-seed-code",
"minimax-m2.5",
"glm-4.7",
"deepseek-v3.2",
"kimi-k2",
@@ -64,6 +76,25 @@ var volcengineCodingPlanAllowedModelFamilies = []string{
const volcengineCodingPlanEmptyModelsError = `当前接口未返回可用的火山 Coding Plan 模型,请检查账号权限或切换到"火山方舟"供应商`
// claudeCLIHealthCheckFunc probes a claude-cli provider by sending a minimal
// one-token "ping" chat capped at 30 seconds. Declared as a package var so
// tests can stub it out.
var claudeCLIHealthCheckFunc = func(config ai.ProviderConfig) error {
ctx, cancel := context.WithTimeout(context.Background(), 30*time.Second)
defer cancel()
cliProvider, err := provider.NewProvider(config)
if err != nil {
return err
}
// MaxTokens: 1 keeps the check cheap while still exercising the full
// CLI auth + request path.
_, err = cliProvider.Chat(ctx, ai.ChatRequest{
Messages: []ai.Message{
{Role: "user", Content: "ping"},
},
MaxTokens: 1,
Temperature: 0,
})
return err
}
// NewService 创建 AI Service 实例
func NewService() *Service {
return &Service{
@@ -197,6 +228,12 @@ func (s *Service) AITestProvider(config ai.ProviderConfig) map[string]interface{
err = fmt.Errorf("上游服务器内部错误 (HTTP %d)", resp.StatusCode)
}
}
case "claude-cli":
testConfig := config
if strings.TrimSpace(testConfig.Model) == "" && isDashScopeCodingPlanProvider(testConfig) && len(dashScopeCodingPlanModels) > 0 {
testConfig.Model = dashScopeCodingPlanModels[0]
}
err = claudeCLIHealthCheckFunc(testConfig)
default:
if baseURL != "" {
req, _ := http.NewRequest("GET", baseURL, nil)
@@ -263,8 +300,12 @@ func isDashScopeCodingPlanAnthropicProvider(config ai.ProviderConfig) bool {
if normalizedProviderType(config) != "anthropic" {
return false
}
return isDashScopeCodingPlanProvider(config)
}
func isDashScopeCodingPlanProvider(config ai.ProviderConfig) bool {
host, path := parseProviderBaseURL(config.BaseURL)
return host == "coding.dashscope.aliyuncs.com" && strings.HasPrefix(path, "/apps/anthropic")
return host == "coding.dashscope.aliyuncs.com" && (strings.HasPrefix(path, "/apps/anthropic") || strings.HasPrefix(path, "/v1"))
}
func isVolcengineCodingPlanProvider(config ai.ProviderConfig) bool {
@@ -279,6 +320,17 @@ func filterVolcengineCodingPlanModels(models []string) []string {
filtered := make([]string, 0, len(models))
for _, model := range models {
lowerModel := strings.ToLower(strings.TrimSpace(model))
matched := false
for _, exactModel := range volcengineCodingPlanAllowedExactModels {
if lowerModel == exactModel {
filtered = append(filtered, model)
matched = true
break
}
}
if matched {
continue
}
for _, family := range volcengineCodingPlanAllowedModelFamilies {
if strings.Contains(lowerModel, family) {
filtered = append(filtered, model)
@@ -304,7 +356,7 @@ func defaultStaticModelsForProvider(config ai.ProviderConfig) []string {
if isMiniMaxAnthropicProvider(config) {
return append([]string(nil), miniMaxAnthropicModels...)
}
if isDashScopeCodingPlanAnthropicProvider(config) {
if isDashScopeCodingPlanProvider(config) {
return append([]string(nil), dashScopeCodingPlanModels...)
}
return nil
@@ -314,7 +366,10 @@ func normalizeProviderConfig(config ai.ProviderConfig) ai.ProviderConfig {
switch {
case isDashScopeBailianAnthropicProvider(config):
config.Models = nil
case isDashScopeCodingPlanAnthropicProvider(config):
case isDashScopeCodingPlanProvider(config):
config.Type = "custom"
config.APIFormat = "claude-cli"
config.BaseURL = dashScopeCodingPlanAnthropicBaseURL
config.Models = append([]string(nil), dashScopeCodingPlanModels...)
default:
staticModels := defaultStaticModelsForProvider(config)
@@ -474,6 +529,11 @@ func (s *Service) AIListModels() map[string]interface{} {
return map[string]interface{}{"success": false, "models": []string{}, "error": "未找到活跃 Provider"}
}
config = normalizeProviderConfig(config)
if staticModels := defaultStaticModelsForProvider(config); len(staticModels) > 0 {
return map[string]interface{}{"success": true, "models": staticModels, "source": "static"}
}
models, err := fetchModelsFunc(config)
if err != nil {
// 回退到配置中的静态模型列表

View File

@@ -17,21 +17,34 @@ func TestDefaultStaticModelsForProvider_DoesNotReturnBailianStaticModels(t *test
}
}
func TestDefaultStaticModelsForProvider_ReturnsDashScopeCodingPlanModels(t *testing.T) {
models := defaultStaticModelsForProvider(ai.ProviderConfig{
Type: "anthropic",
BaseURL: "https://coding.dashscope.aliyuncs.com/apps/anthropic",
})
func TestDefaultStaticModelsForProvider_ReturnsDashScopeCodingPlanSupportedModels(t *testing.T) {
expected := []string{
"qwen3.5-plus",
"kimi-k2.5",
"glm-5",
"MiniMax-M2.5",
"qwen3-max-2026-01-23",
"qwen3-coder-next",
"qwen3-coder-plus",
"qwen3-coder-480b-a35b-instruct",
"qwen3-coder-30b-a3b-instruct",
"qwen3-coder-flash",
"qwen-plus",
"qwen-turbo",
"glm-4.7",
}
if !reflect.DeepEqual(models, expected) {
t.Fatalf("expected Coding Plan static models %v, got %v", expected, models)
testCases := []ai.ProviderConfig{
{
Type: "anthropic",
BaseURL: "https://coding.dashscope.aliyuncs.com/apps/anthropic",
},
{
Type: "custom",
APIFormat: "claude-cli",
BaseURL: "https://coding.dashscope.aliyuncs.com/apps/anthropic",
},
}
for _, testCase := range testCases {
models := defaultStaticModelsForProvider(testCase)
if !reflect.DeepEqual(models, expected) {
t.Fatalf("expected Coding Plan supported models %v, got %v for config %#v", expected, models, testCase)
}
}
}
@@ -48,12 +61,21 @@ func TestNormalizeProviderConfig_DoesNotForceModelForDashScopeProviders(t *testi
Type: "anthropic",
BaseURL: "https://coding.dashscope.aliyuncs.com/apps/anthropic",
})
if codingPlan.Type != "custom" {
t.Fatalf("expected Coding Plan provider type to normalize to custom, got %q", codingPlan.Type)
}
if codingPlan.APIFormat != "claude-cli" {
t.Fatalf("expected Coding Plan provider api format to normalize to claude-cli, got %q", codingPlan.APIFormat)
}
if codingPlan.Model != "" {
t.Fatalf("expected Coding Plan model to remain empty until explicit selection, got %q", codingPlan.Model)
}
if len(codingPlan.Models) == 0 {
t.Fatal("expected Coding Plan provider to expose official supported models")
}
if codingPlan.Models[0] != "qwen3.5-plus" {
t.Fatalf("expected Coding Plan provider to expose latest supported models, got %v", codingPlan.Models)
}
}
func TestResolveModelsURL_UsesDashScopeCompatibleModelsEndpointForBailianAnthropic(t *testing.T) {
@@ -66,20 +88,70 @@ func TestResolveModelsURL_UsesDashScopeCompatibleModelsEndpointForBailianAnthrop
}
}
func TestNewProviderHealthCheckRequest_UsesMessagesEndpointForDashScopeCodingPlanAnthropic(t *testing.T) {
req, err := newProviderHealthCheckRequest(ai.ProviderConfig{
Type: "anthropic",
BaseURL: "https://coding.dashscope.aliyuncs.com/apps/anthropic",
Model: "qwen3-coder-plus",
APIKey: "sk-test",
})
if err != nil {
t.Fatalf("unexpected error: %v", err)
func TestAIListModels_ReturnsStaticModelsForDashScopeCodingPlanWithoutRemoteFetch(t *testing.T) {
originalFetchModelsFunc := fetchModelsFunc
fetchModelsFunc = func(config ai.ProviderConfig) ([]string, error) {
t.Fatalf("expected Coding Plan model list to stay static and skip remote fetch, got config %#v", config)
return nil, nil
}
if req.Method != "POST" {
t.Fatalf("expected POST request, got %s", req.Method)
defer func() {
fetchModelsFunc = originalFetchModelsFunc
}()
service := NewService()
service.providers = []ai.ProviderConfig{
{
ID: "provider-coding-plan",
Type: "anthropic",
BaseURL: "https://coding.dashscope.aliyuncs.com/apps/anthropic",
},
}
if req.URL.String() != "https://coding.dashscope.aliyuncs.com/apps/anthropic/v1/messages" {
t.Fatalf("expected Coding Plan messages endpoint, got %q", req.URL.String())
service.activeProvider = "provider-coding-plan"
result := service.AIListModels()
if result["success"] != true {
t.Fatalf("expected AIListModels to succeed, got %#v", result)
}
models, ok := result["models"].([]string)
if !ok {
t.Fatalf("expected []string models, got %#v", result["models"])
}
if len(models) == 0 || models[0] != "qwen3.5-plus" {
t.Fatalf("expected official static Coding Plan models, got %#v", models)
}
if source, _ := result["source"].(string); source != "static" {
t.Fatalf("expected static source, got %#v", result["source"])
}
}
// TestAITestProvider_UsesClaudeCLIHealthCheckForDashScopeCodingPlan verifies
// that testing a Coding Plan provider is routed through the Claude CLI health
// check with a normalized config (custom type, claude-cli format) and that an
// empty model defaults to the first supported probe model.
func TestAITestProvider_UsesClaudeCLIHealthCheckForDashScopeCodingPlan(t *testing.T) {
	savedHealthCheck := claudeCLIHealthCheckFunc
	defer func() {
		claudeCLIHealthCheckFunc = savedHealthCheck
	}()

	// Capture the config handed to the health check instead of spawning the CLI.
	var captured ai.ProviderConfig
	claudeCLIHealthCheckFunc = func(cfg ai.ProviderConfig) error {
		captured = cfg
		return nil
	}

	result := NewService().AITestProvider(ai.ProviderConfig{
		Type:    "anthropic",
		BaseURL: "https://coding.dashscope.aliyuncs.com/apps/anthropic",
		APIKey:  "sk-test",
	})
	if result["success"] != true {
		t.Fatalf("expected AITestProvider to succeed, got %#v", result)
	}
	if captured.Type != "custom" {
		t.Fatalf("expected Coding Plan test to use custom provider type, got %q", captured.Type)
	}
	if captured.APIFormat != "claude-cli" {
		t.Fatalf("expected Coding Plan test to use claude-cli api format, got %q", captured.APIFormat)
	}
	if captured.Model != "qwen3.5-plus" {
		t.Fatalf("expected Coding Plan test to default probe model to qwen3.5-plus, got %q", captured.Model)
	}
}

View File

@@ -19,16 +19,26 @@ func TestIsVolcengineCodingPlanProvider_MatchesCodingPlanBaseURL(t *testing.T) {
func TestFilterVolcengineCodingPlanModels_KeepsOnlySupportedFamilies(t *testing.T) {
filtered := filterVolcengineCodingPlanModels([]string{
"Auto",
"qwen3-14b-20250429",
"wan2-1-14b-t2v-250225",
"Doubao-Seed-2.0-Code",
"Doubao-Seed-2.0-pro",
"Doubao-Seed-2.0-lite",
"doubao-seed-code-32k-250615",
"MiniMax-M2.5",
"GLM-4.7",
"DeepSeek-V3.2",
"kimi-k2-turbo-preview",
})
expected := []string{
"Auto",
"Doubao-Seed-2.0-Code",
"Doubao-Seed-2.0-pro",
"Doubao-Seed-2.0-lite",
"doubao-seed-code-32k-250615",
"MiniMax-M2.5",
"GLM-4.7",
"DeepSeek-V3.2",
"kimi-k2-turbo-preview",
@@ -38,6 +48,18 @@ func TestFilterVolcengineCodingPlanModels_KeepsOnlySupportedFamilies(t *testing.
}
}
// Only the exact "Auto" entry may survive the filter; model names that merely
// contain the substring "auto" must be dropped.
func TestFilterVolcengineCodingPlanModels_DoesNotBroadlyMatchAutoKeyword(t *testing.T) {
	input := []string{"Auto", "automatic-router-preview"}
	filtered := filterVolcengineCodingPlanModels(input)
	want := []string{"Auto"}
	if !reflect.DeepEqual(filtered, want) {
		t.Fatalf("expected only exact Auto model to remain, got %v", filtered)
	}
}
func TestFilterFetchedModelsForProvider_DoesNotFilterVolcengineArk(t *testing.T) {
rawModels := []string{
"qwen3-14b-20250429",

View File

@@ -25,7 +25,7 @@ type Tool struct {
// Message 表示一条对话消息
type Message struct {
Role string `json:"role"` // "system" | "user" | "assistant" | "tool"
Role string `json:"role"` // "system" | "user" | "assistant" | "tool"
Content string `json:"content"`
Images []string `json:"images,omitempty"` // base64 encoded images with data:image/png;base64,... prefix
ToolCallID string `json:"tool_call_id,omitempty"` // 当 role 为 "tool" 时必须传递
@@ -66,13 +66,13 @@ type StreamChunk struct {
// ProviderConfig AI Provider 配置
type ProviderConfig struct {
ID string `json:"id"`
Type string `json:"type"` // openai | anthropic | gemini | custom
Type string `json:"type"` // openai | anthropic | gemini | custom
Name string `json:"name"`
APIKey string `json:"apiKey"`
BaseURL string `json:"baseUrl"`
Model string `json:"model"`
Models []string `json:"models,omitempty"`
APIFormat string `json:"apiFormat,omitempty"` // custom 专用: openai | anthropic | gemini
APIFormat string `json:"apiFormat,omitempty"` // custom 专用: openai | anthropic | gemini | claude-cli
Headers map[string]string `json:"headers,omitempty"`
MaxTokens int `json:"maxTokens"`
Temperature float64 `json:"temperature"`