🐛 fix(ai/query-editor/mac-window): 修复模型兼容性并优化即时执行与窗口交互

- AI 兼容性:为 Anthropic Provider 补齐 tools/tool_use/tool_result 协议转换,支持工具调用与流式工具结果解析
- 降级策略:OpenAI 兼容接口在 tools 请求返回 400/422/404 时自动回退为纯文本模式
- 配置修复:调整 MiniMax 预设为 Anthropic 兼容端点并更新默认模型列表
- 状态隔离:AI 聊天面板停止将动态模型列表写回供应商配置,避免污染静态 models 数据
- 编辑器修复:QueryEditor 在 runImmediately 场景下避免重复追加 SQL,改为直接选中并执行
- 交互优化:修复 macOS 原生窗口控制切换与标题栏点击行为,避免窗口按钮状态异常
This commit is contained in:
Syngnat
2026-03-26 17:57:29 +08:00
parent 98e9e5686d
commit b958ff6481
8 changed files with 246 additions and 61 deletions

View File

@@ -1215,9 +1215,6 @@ function App() {
if (target?.closest('[data-no-titlebar-toggle="true"]')) {
return;
}
if (useNativeMacWindowControls) {
return;
}
void handleTitleBarWindowToggle();
};

View File

@@ -333,21 +333,8 @@ export const AIChatPanel: React.FC<AIChatPanelProps> = ({
}
}, [activeProvider?.id]);
useEffect(() => {
if (activeProvider && dynamicModels.length > 0) {
const currentModels = activeProvider.models || [];
if (JSON.stringify(currentModels) !== JSON.stringify(dynamicModels)) {
try {
const Service = (window as any).go?.aiservice?.Service;
const payload = { ...activeProvider, models: dynamicModels };
Service?.AISaveProvider?.(payload);
setActiveProvider(payload);
} catch (e) {
console.warn('Failed to cache models', e);
}
}
}
}, [activeProvider, dynamicModels]);
// dynamicModels 仅在内存中使用,不再写回供应商配置,避免污染静态 models 列表
const fetchDynamicModels = useCallback(async () => {
try {

View File

@@ -34,7 +34,7 @@ const PROVIDER_PRESETS: ProviderPreset[] = [
{ key: 'anthropic', label: 'Claude', icon: <ExperimentOutlined />, desc: 'Claude Opus/Sonnet 4.6', color: '#d97706', backendType: 'anthropic', defaultBaseUrl: 'https://api.anthropic.com', defaultModel: 'claude-sonnet-4-6', models: ['claude-opus-4-6', 'claude-sonnet-4-6'] },
{ key: 'gemini', label: 'Gemini', icon: <CloudOutlined />, desc: 'Gemini 3.1 / 2.5 系列', color: '#059669', backendType: 'gemini', defaultBaseUrl: 'https://generativelanguage.googleapis.com', defaultModel: 'gemini-2.5-flash', models: ['gemini-3.1-pro', 'gemini-2.5-flash', 'gemini-2.5-pro'] },
{ key: 'volcengine', label: '火山引擎', icon: <CloudOutlined />, desc: '火山方舟 / 豆包大模型', color: '#0ea5e9', backendType: 'openai', defaultBaseUrl: 'https://ark.cn-beijing.volces.com/api/v3', defaultModel: 'ep-xxxxxx', models: [] },
{ key: 'minimax', label: 'MiniMax', icon: <ExperimentOutlined />, desc: 'abab6.5 / abab7 系列', color: '#e11d48', backendType: 'openai', defaultBaseUrl: 'https://api.minimax.chat/v1', defaultModel: 'abab7-chat-preview', models: ['abab7-chat-preview', 'abab6.5-chat', 'abab6.5g-chat'] },
{ key: 'minimax', label: 'MiniMax', icon: <ExperimentOutlined />, desc: 'abab6.5 / abab7 系列', color: '#e11d48', backendType: 'anthropic', defaultBaseUrl: 'https://api.minimaxi.com/anthropic', defaultModel: 'MiniMax-Text-01', models: ['MiniMax-Text-01', 'MiniMax-Text-01-vision', 'MiniMax-Text-01-search', 'MiniMax-Text-01-code', 'MiniMax-Text-01-web', 'MiniMax-Text-01-sql', 'MiniMax-Text-01-python', 'MiniMax-Text-01-math', 'MiniMax-Text-01-doc'] },
{ key: 'ollama', label: 'Ollama', icon: <AppstoreOutlined />, desc: '本地部署开源模型', color: '#78716c', backendType: 'openai', defaultBaseUrl: 'http://localhost:11434/v1', defaultModel: 'llama3', models: [] },
{ key: 'custom', label: '自定义', icon: <AppstoreOutlined />, desc: '自定义 API 端点', color: '#64748b', backendType: 'custom', defaultBaseUrl: '', defaultModel: '', models: [] },
];

View File

@@ -2040,11 +2040,25 @@ const QueryEditor: React.FC<{ tab: TabData }> = ({ tab }) => {
setCurrentDb(dbName);
}
const editor = editorRef.current;
const monaco = monacoRef.current;
if (editor && monaco) {
let position = editor.getPosition();
const model = editor.getModel();
const existingContent = editor.getValue?.() || '';
// runImmediately 模式下,如果编辑器内容已是待注入的 SQL(TabManager 创建时已传入),
// 跳过追加,直接选中全部内容并执行
if (e.detail.runImmediately && existingContent.trim() === sqlText.trim()) {
if (model) {
const lineCount = model.getLineCount();
const maxCol = model.getLineMaxColumn(lineCount);
editor.setSelection(new monaco.Range(1, 1, lineCount, maxCol));
editor.focus();
setTimeout(() => handleRun(), 500);
}
} else {
let position = editor.getPosition();
if (!position && model) {
const lineCount = model.getLineCount();
const maxCol = model.getLineMaxColumn(lineCount);
@@ -2081,6 +2095,7 @@ const QueryEditor: React.FC<{ tab: TabData }> = ({ tab }) => {
setTimeout(() => handleRun(), 500);
}
}
}
} else {
setQuery((prev: string) => prev ? prev + '\n' + sqlText : sqlText);
message.success('代码已追加');

View File

@@ -151,7 +151,7 @@ const TabManager: React.FC = () => {
id: newTabId,
type: 'query',
title: '新建查询',
query: '',
query: sql,
connectionId: resolvedConnId,
dbName: resolvedDbName
});

View File

@@ -72,13 +72,23 @@ func (p *AnthropicProvider) Validate() error {
return nil
}
// --- 请求体类型 ---
type anthropicRequest struct {
Model string `json:"model"`
Messages []anthropicMessage `json:"messages"`
System string `json:"system,omitempty"`
MaxTokens int `json:"max_tokens"`
Temperature float64 `json:"temperature,omitempty"`
Stream bool `json:"stream,omitempty"`
Model string `json:"model"`
Messages []anthropicMessage `json:"messages"`
System string `json:"system,omitempty"`
MaxTokens int `json:"max_tokens"`
Temperature float64 `json:"temperature,omitempty"`
Stream bool `json:"stream,omitempty"`
Tools []anthropicTool `json:"tools,omitempty"`
}
// anthropicTool is a tool definition in Anthropic's Messages API format,
// produced by converting an OpenAI-style function tool (see convertToolsToAnthropic).
type anthropicTool struct {
	Name        string `json:"name"`                  // tool name, matches the OpenAI function name
	Description string `json:"description,omitempty"` // optional human-readable description
	InputSchema any    `json:"input_schema"`          // JSON Schema for the tool's input (passed through as-is)
}
type anthropicMessage struct {
@@ -86,9 +96,66 @@ type anthropicMessage struct {
Content interface{} `json:"content"`
}
// convertToolsToAnthropic maps OpenAI-format tool definitions onto the
// Anthropic tool schema. It returns nil when no tools are supplied, so the
// "tools" field is omitted from the marshalled request entirely.
func convertToolsToAnthropic(tools []ai.Tool) []anthropicTool {
	if len(tools) == 0 {
		return nil
	}
	converted := make([]anthropicTool, len(tools))
	for i, tool := range tools {
		converted[i] = anthropicTool{
			Name:        tool.Function.Name,
			Description: tool.Function.Description,
			InputSchema: tool.Function.Parameters,
		}
	}
	return converted
}
func buildAnthropicMessages(reqMessages []ai.Message) []anthropicMessage {
messages := make([]anthropicMessage, 0, len(reqMessages))
for _, m := range reqMessages {
// tool result 消息:转换为 Anthropic 的 tool_result content block
if m.Role == "tool" {
messages = append(messages, anthropicMessage{
Role: "user",
Content: []map[string]interface{}{
{
"type": "tool_result",
"tool_use_id": m.ToolCallID,
"content": m.Content,
},
},
})
continue
}
// assistant 带 tool_calls:转换为 Anthropic 的 tool_use content block
if m.Role == "assistant" && len(m.ToolCalls) > 0 {
var contentParts []map[string]interface{}
if m.Content != "" {
contentParts = append(contentParts, map[string]interface{}{
"type": "text",
"text": m.Content,
})
}
for _, tc := range m.ToolCalls {
var input interface{}
if err := json.Unmarshal([]byte(tc.Function.Arguments), &input); err != nil {
input = map[string]interface{}{}
}
contentParts = append(contentParts, map[string]interface{}{
"type": "tool_use",
"id": tc.ID,
"name": tc.Function.Name,
"input": input,
})
}
messages = append(messages, anthropicMessage{Role: "assistant", Content: contentParts})
continue
}
// 图片消息
if len(m.Images) > 0 {
var contentParts []map[string]interface{}
for _, img := range m.Images {
@@ -106,7 +173,7 @@ func buildAnthropicMessages(reqMessages []ai.Message) []anthropicMessage {
}
text := m.Content
if text == "" {
text = "请描述和分析这张图片。" // 防止强 System Prompt 下模型仅看到空文本且忽略图片直接回复打招呼
text = "请描述和分析这张图片。"
}
contentParts = append(contentParts, map[string]interface{}{
"type": "text",
@@ -120,11 +187,19 @@ func buildAnthropicMessages(reqMessages []ai.Message) []anthropicMessage {
return messages
}
// --- Response body types ---

// anthropicContentBlock is one entry of a Messages API "content" array.
// Text blocks populate Text; tool_use blocks populate ID/Name/Input.
type anthropicContentBlock struct {
	Type  string          `json:"type"` // "text" | "tool_use"
	Text  string          `json:"text,omitempty"`
	ID    string          `json:"id,omitempty"`    // tool_use only
	Name  string          `json:"name,omitempty"`  // tool_use only
	Input json.RawMessage `json:"input,omitempty"` // tool_use only: raw JSON arguments
}
type anthropicResponse struct {
Content []struct {
Text string `json:"text"`
} `json:"content"`
Usage struct {
Content []anthropicContentBlock `json:"content"`
Usage struct {
InputTokens int `json:"input_tokens"`
OutputTokens int `json:"output_tokens"`
} `json:"usage"`
@@ -133,13 +208,20 @@ type anthropicResponse struct {
} `json:"error,omitempty"`
}
// 流式事件类型
type anthropicStreamEvent struct {
Type string `json:"type"`
Delta *struct {
Text string `json:"text"`
Type string `json:"type"`
Index int `json:"index,omitempty"`
ContentBlock *anthropicContentBlock `json:"content_block,omitempty"`
Delta *struct {
Type string `json:"type,omitempty"`
Text string `json:"text,omitempty"`
PartialJSON string `json:"partial_json,omitempty"`
} `json:"delta,omitempty"`
}
// --- Chat 非流式 ---
func (p *AnthropicProvider) Chat(ctx context.Context, req ai.ChatRequest) (*ai.ChatResponse, error) {
if err := p.Validate(); err != nil {
return nil, err
@@ -163,6 +245,7 @@ func (p *AnthropicProvider) Chat(ctx context.Context, req ai.ChatRequest) (*ai.C
System: systemMsg,
MaxTokens: maxTokens,
Temperature: temperature,
Tools: convertToolsToAnthropic(req.Tools),
}
respBody, err := p.doRequest(ctx, body)
@@ -182,8 +265,35 @@ func (p *AnthropicProvider) Chat(ctx context.Context, req ai.ChatRequest) (*ai.C
return nil, fmt.Errorf("Anthropic 返回空响应")
}
// 解析响应中的 text 和 tool_use content blocks
var textContent string
var toolCalls []ai.ToolCall
for _, block := range result.Content {
switch block.Type {
case "text":
textContent += block.Text
case "tool_use":
argsStr := "{}"
if len(block.Input) > 0 {
argsStr = string(block.Input)
}
toolCalls = append(toolCalls, ai.ToolCall{
ID: block.ID,
Type: "function",
Function: struct {
Name string `json:"name"`
Arguments string `json:"arguments"`
}{
Name: block.Name,
Arguments: argsStr,
},
})
}
}
return &ai.ChatResponse{
Content: result.Content[0].Text,
Content: textContent,
ToolCalls: toolCalls,
TokensUsed: ai.TokenUsage{
PromptTokens: result.Usage.InputTokens,
CompletionTokens: result.Usage.OutputTokens,
@@ -192,6 +302,8 @@ func (p *AnthropicProvider) Chat(ctx context.Context, req ai.ChatRequest) (*ai.C
}, nil
}
// --- ChatStream 流式 ---
func (p *AnthropicProvider) ChatStream(ctx context.Context, req ai.ChatRequest, callback func(ai.StreamChunk)) error {
if err := p.Validate(); err != nil {
return err
@@ -216,6 +328,7 @@ func (p *AnthropicProvider) ChatStream(ctx context.Context, req ai.ChatRequest,
MaxTokens: maxTokens,
Temperature: temperature,
Stream: true,
Tools: convertToolsToAnthropic(req.Tools),
}
respBody, err := p.doRequest(ctx, body)
@@ -224,6 +337,14 @@ func (p *AnthropicProvider) ChatStream(ctx context.Context, req ai.ChatRequest,
}
defer respBody.Close()
// 跟踪当前活跃的 tool_use blocks
type activeToolUse struct {
id string
name string
argsJSON strings.Builder
}
activeBlocks := make(map[int]*activeToolUse) // index -> block
scanner := bufio.NewScanner(respBody)
for scanner.Scan() {
line := scanner.Text()
@@ -238,10 +359,54 @@ func (p *AnthropicProvider) ChatStream(ctx context.Context, req ai.ChatRequest,
}
switch event.Type {
case "content_block_delta":
if event.Delta != nil && event.Delta.Text != "" {
callback(ai.StreamChunk{Content: event.Delta.Text})
case "content_block_start":
if event.ContentBlock != nil && event.ContentBlock.Type == "tool_use" {
activeBlocks[event.Index] = &activeToolUse{
id: event.ContentBlock.ID,
name: event.ContentBlock.Name,
}
}
case "content_block_delta":
if event.Delta == nil {
continue
}
switch event.Delta.Type {
case "text_delta":
if event.Delta.Text != "" {
callback(ai.StreamChunk{Content: event.Delta.Text})
}
case "input_json_delta":
if block, ok := activeBlocks[event.Index]; ok {
block.argsJSON.WriteString(event.Delta.PartialJSON)
}
}
case "content_block_stop":
if block, ok := activeBlocks[event.Index]; ok {
argsStr := block.argsJSON.String()
if argsStr == "" {
argsStr = "{}"
}
// 产出完整的 tool call
callback(ai.StreamChunk{
ToolCalls: []ai.ToolCall{
{
ID: block.id,
Type: "function",
Function: struct {
Name string `json:"name"`
Arguments string `json:"arguments"`
}{
Name: block.name,
Arguments: argsStr,
},
},
},
})
delete(activeBlocks, event.Index)
}
case "message_stop":
callback(ai.StreamChunk{Done: true})
return nil
@@ -252,6 +417,8 @@ func (p *AnthropicProvider) ChatStream(ctx context.Context, req ai.ChatRequest,
return scanner.Err()
}
// --- HTTP 请求 ---
func (p *AnthropicProvider) doRequest(ctx context.Context, body interface{}) (io.ReadCloser, error) {
jsonBody, err := json.Marshal(body)
if err != nil {
@@ -263,15 +430,6 @@ func (p *AnthropicProvider) doRequest(ctx context.Context, body interface{}) (io
url = p.baseURL + "/messages"
}
// 调试日志:打印实际请求信息
bodyStr := string(jsonBody)
if len(bodyStr) > 500 {
bodyStr = bodyStr[:500] + "..."
}
fmt.Printf("[Anthropic DEBUG] URL: %s\n", url)
fmt.Printf("[Anthropic DEBUG] BaseURL: %s\n", p.baseURL)
fmt.Printf("[Anthropic DEBUG] Body: %s\n", bodyStr)
httpReq, err := http.NewRequestWithContext(ctx, http.MethodPost, url, bytes.NewReader(jsonBody))
if err != nil {
return nil, fmt.Errorf("创建 HTTP 请求失败: %w", err)

View File

@@ -209,7 +209,17 @@ func (p *OpenAIProvider) Chat(ctx context.Context, req ai.ChatRequest) (*ai.Chat
respBody, err := p.doRequest(ctx, body)
if err != nil {
return nil, err
// 当带 tools 的请求返回 400 时,自动降级为不带 tools 的纯文本请求
if len(req.Tools) > 0 && isHTTP400Error(err) {
fmt.Println("[OpenAI] 模型不支持 Function Calling自动降级为纯文本模式")
body.Tools = nil
respBody, err = p.doRequest(ctx, body)
if err != nil {
return nil, err
}
} else {
return nil, err
}
}
defer respBody.Close()
@@ -257,7 +267,17 @@ func (p *OpenAIProvider) ChatStream(ctx context.Context, req ai.ChatRequest, cal
respBody, err := p.doRequest(ctx, body)
if err != nil {
return err
// 当带 tools 的请求返回 400 时,自动降级为不带 tools 的纯文本请求
if len(req.Tools) > 0 && isHTTP400Error(err) {
fmt.Println("[OpenAI] 模型不支持 Function Calling自动降级为纯文本模式")
body.Tools = nil
respBody, err = p.doRequest(ctx, body)
if err != nil {
return err
}
} else {
return err
}
}
defer respBody.Close()
@@ -366,14 +386,7 @@ func (p *OpenAIProvider) doRequest(ctx context.Context, body interface{}) (io.Re
url := p.baseURL + "/chat/completions"
// 调试日志
bodyStr := string(jsonBody)
if len(bodyStr) > 500 {
bodyStr = bodyStr[:500] + "..."
}
fmt.Printf("[OpenAI DEBUG] URL: %s\n", url)
fmt.Printf("[OpenAI DEBUG] BaseURL: %s\n", p.baseURL)
fmt.Printf("[OpenAI DEBUG] Body: %s\n", bodyStr)
httpReq, err := http.NewRequestWithContext(ctx, http.MethodPost, url, bytes.NewReader(jsonBody))
if err != nil {
@@ -408,3 +421,15 @@ func (p *OpenAIProvider) doRequest(ctx context.Context, body interface{}) (io.Re
return resp.Body, nil
}
// isHTTP400Error 检查错误是否为 HTTP 4xx 客户端错误400/422 等),
// 通常表示模型不支持请求中的某些参数(如 tools/functions
func isHTTP400Error(err error) bool {
if err == nil {
return false
}
msg := err.Error()
return strings.Contains(msg, "(HTTP 400)") ||
strings.Contains(msg, "(HTTP 422)") ||
strings.Contains(msg, "(HTTP 404)")
}

View File

@@ -8,6 +8,9 @@ package app
#import <Cocoa/Cocoa.h>
#import <dispatch/dispatch.h>
static inline BOOL gonaviBoolYES() { return YES; }
static inline BOOL gonaviBoolNO() { return NO; }
static void gonaviSetWindowButtonsVisible(NSWindow *window, BOOL visible) {
if (window == nil) {
return;
@@ -62,9 +65,9 @@ import "C"
func setMacNativeWindowControls(enabled bool) {
state := resolveMacNativeWindowControlState(enabled)
flag := C.BOOL(false)
if state.ShowNativeButtons {
flag = C.BOOL(true)
C.gonaviApplyMacWindowStyle(C.gonaviBoolYES())
} else {
C.gonaviApplyMacWindowStyle(C.gonaviBoolNO())
}
C.gonaviApplyMacWindowStyle(flag)
}