🐛 fix(ai): 修正 Anthropic 兼容供应商问答失败

- 为 AnthropicProvider.Chat 与 ChatStream 补充工具调用降级回退
- 首次携带 tools 请求在 400/422/404 时自动去掉 tools 重试一次
- 补充兼容供应商问答回归测试并更新 backlog 记录

Fixes #333
This commit is contained in:
Syngnat
2026-04-17 12:02:23 +08:00
parent 2569a3779a
commit a2cad9f7ce
3 changed files with 176 additions and 2 deletions

View File

@@ -30,6 +30,7 @@
| #329 | 如果调整了左侧导航栏的宽度后,建议左侧导航栏内增加横向滚动查看 | Fixed | `fcade0f` |
| #330 | 建议在查询结果表格中增加自适应内容列宽的功能 | Fixed | `632e57e` |
| #331 | 重复连接 DB,一分钟重试了 60 多次 | Fixed | `ca76440` |
| #333 | AI 功能添加供应商测试正常,但问答显示失败 | Fixed | Pending |
| #351 | 为什么没有截断和清空表的功能呀? | Fixed | Pending |
## Notes
@@ -88,6 +89,12 @@
- 处理:将连接自动重试范围收敛到应用启动保护窗口内;稳定期下所有连接探测与重建都只执行一次,避免后台挂起场景持续放大失败流量。
- 验证:补充并更新 `internal/app/app_startup_connect_retry_test.go`,覆盖稳定期瞬时失败不重试、不再输出重试提示,以及启动期仍保留完整重试预算。
### #333
- 根因:AI 供应商“测试连接”走的是轻量健康检查,不会带 `tools`;而正式聊天默认会把本地工具定义一起发给模型。当前 `Anthropic` 协议路径缺少和 `OpenAI` 一样的 400 自动降级逻辑,遇到不支持工具调用的兼容端点时会直接报错。
- 处理:为 `AnthropicProvider.Chat / ChatStream` 补充 400 降级回退。首次带 `tools` 请求若返回 400/422/404,则自动去掉 `tools` 重试一次,允许不支持 function calling 的兼容端点继续完成普通问答。
- 验证:补充 `internal/ai/provider/anthropic_test.go` 回归测试,覆盖非流式与流式两条链路下“首请求因 tools 返回 400,回退后成功”的场景,并执行 `go test ./internal/ai/provider -count=1`。
### #330
- 根因:查询结果表格已经支持拖拽调整列宽,但 resize handle 没有提供双击自适应逻辑,导致用户只能靠手工拖拽慢慢试宽度。

View File

@@ -283,7 +283,15 @@ func (p *AnthropicProvider) Chat(ctx context.Context, req ai.ChatRequest) (*ai.C
respBody, err := p.doRequest(ctx, body)
if err != nil {
return nil, err
if len(req.Tools) > 0 && isHTTP400Error(err) {
body.Tools = nil
respBody, err = p.doRequest(ctx, body)
if err != nil {
return nil, err
}
} else {
return nil, err
}
}
defer respBody.Close()
@@ -366,7 +374,15 @@ func (p *AnthropicProvider) ChatStream(ctx context.Context, req ai.ChatRequest,
respBody, err := p.doRequest(ctx, body)
if err != nil {
return err
if len(req.Tools) > 0 && isHTTP400Error(err) {
body.Tools = nil
respBody, err = p.doRequest(ctx, body)
if err != nil {
return err
}
} else {
return err
}
}
defer respBody.Close()

View File

@@ -1,8 +1,15 @@
package provider
import (
"context"
"encoding/json"
"io"
"net/http"
"net/http/httptest"
"strings"
"testing"
"GoNavi-Wails/internal/ai"
)
func TestNormalizeAnthropicMessagesURL_AppendsMessagesSuffix(t *testing.T) {
@@ -55,3 +62,147 @@ func TestApplyAnthropicAuthHeaders_UsesBearerForDashScopeCompatibleAnthropic(t *
t.Fatalf("expected no anthropic-version header for DashScope, got %q", got)
}
}
// TestAnthropicProviderChatRetriesWithoutToolsOnHTTP400 verifies the
// non-streaming fallback: a first Chat request carrying tools that is
// rejected with HTTP 400 must be retried once without tools and succeed.
func TestAnthropicProviderChatRetriesWithoutToolsOnHTTP400(t *testing.T) {
	requestCount := 0
	server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
		requestCount++
		// This handler runs on a server-owned goroutine, so t.Fatalf
		// (which calls runtime.Goexit) must not be used here; report
		// failures with t.Errorf and return an error response instead.
		if r.URL.Path != "/v1/messages" {
			t.Errorf("unexpected request path: %s", r.URL.Path)
			http.Error(w, "unexpected path", http.StatusNotFound)
			return
		}
		body, err := io.ReadAll(r.Body)
		if err != nil {
			t.Errorf("read request body failed: %v", err)
			http.Error(w, "read body failed", http.StatusInternalServerError)
			return
		}
		var payload map[string]interface{}
		if err := json.Unmarshal(body, &payload); err != nil {
			t.Errorf("unmarshal request body failed: %v", err)
			http.Error(w, "bad json", http.StatusInternalServerError)
			return
		}
		// Simulate a compatible endpoint without function-calling support:
		// any request that still carries tools is rejected with 400.
		if _, hasTools := payload["tools"]; hasTools {
			http.Error(w, `{"error":{"message":"tools unsupported"}}`, http.StatusBadRequest)
			return
		}
		w.Header().Set("Content-Type", "application/json")
		_, _ = w.Write([]byte(`{"content":[{"type":"text","text":"pong"}],"usage":{"input_tokens":1,"output_tokens":1}}`))
	}))
	defer server.Close()
	providerInstance, err := NewAnthropicProvider(ai.ProviderConfig{
		Type:        "anthropic",
		Name:        "test-anthropic",
		APIKey:      "sk-test",
		BaseURL:     server.URL,
		Model:       "claude-test",
		MaxTokens:   64,
		Temperature: 0.1,
	})
	if err != nil {
		t.Fatalf("create provider failed: %v", err)
	}
	resp, err := providerInstance.Chat(context.Background(), ai.ChatRequest{
		Messages: []ai.Message{{Role: "user", Content: "ping"}},
		Tools: []ai.Tool{{
			Type: "function",
			Function: ai.ToolFunction{
				Name:        "get_tables",
				Description: "test tool",
				Parameters: map[string]interface{}{
					"type": "object",
				},
			},
		}},
	})
	if err != nil {
		t.Fatalf("expected chat fallback to succeed, got %v", err)
	}
	if resp.Content != "pong" {
		t.Fatalf("expected fallback content %q, got %q", "pong", resp.Content)
	}
	// Exactly two round-trips: the rejected tools request plus the fallback.
	if requestCount != 2 {
		t.Fatalf("expected 2 requests (with tools then fallback), got %d", requestCount)
	}
}
// TestAnthropicProviderChatStreamRetriesWithoutToolsOnHTTP400 verifies the
// streaming fallback: a first ChatStream request carrying tools that is
// rejected with HTTP 400 must be retried once without tools, and the retry
// must deliver content chunks followed by a final done chunk.
func TestAnthropicProviderChatStreamRetriesWithoutToolsOnHTTP400(t *testing.T) {
	requestCount := 0
	server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
		requestCount++
		// This handler runs on a server-owned goroutine, so t.Fatalf
		// (which calls runtime.Goexit) must not be used here; report
		// failures with t.Errorf and return an error response instead.
		if r.URL.Path != "/v1/messages" {
			t.Errorf("unexpected request path: %s", r.URL.Path)
			http.Error(w, "unexpected path", http.StatusNotFound)
			return
		}
		body, err := io.ReadAll(r.Body)
		if err != nil {
			t.Errorf("read request body failed: %v", err)
			http.Error(w, "read body failed", http.StatusInternalServerError)
			return
		}
		var payload map[string]interface{}
		if err := json.Unmarshal(body, &payload); err != nil {
			t.Errorf("unmarshal request body failed: %v", err)
			http.Error(w, "bad json", http.StatusInternalServerError)
			return
		}
		// Simulate a compatible endpoint without function-calling support:
		// any request that still carries tools is rejected with 400.
		if _, hasTools := payload["tools"]; hasTools {
			http.Error(w, `{"error":{"message":"tools unsupported"}}`, http.StatusBadRequest)
			return
		}
		// Minimal SSE stream: one text delta followed by message_stop.
		w.Header().Set("Content-Type", "text/event-stream")
		_, _ = w.Write([]byte(strings.Join([]string{
			`data: {"type":"content_block_delta","delta":{"type":"text_delta","text":"pong"}}`,
			``,
			`data: {"type":"message_stop"}`,
			``,
		}, "\n")))
	}))
	defer server.Close()
	providerInstance, err := NewAnthropicProvider(ai.ProviderConfig{
		Type:        "anthropic",
		Name:        "test-anthropic",
		APIKey:      "sk-test",
		BaseURL:     server.URL,
		Model:       "claude-test",
		MaxTokens:   64,
		Temperature: 0.1,
	})
	if err != nil {
		t.Fatalf("create provider failed: %v", err)
	}
	var chunks []ai.StreamChunk
	err = providerInstance.ChatStream(context.Background(), ai.ChatRequest{
		Messages: []ai.Message{{Role: "user", Content: "ping"}},
		Tools: []ai.Tool{{
			Type: "function",
			Function: ai.ToolFunction{
				Name:        "get_tables",
				Description: "test tool",
				Parameters: map[string]interface{}{
					"type": "object",
				},
			},
		}},
	}, func(chunk ai.StreamChunk) {
		chunks = append(chunks, chunk)
	})
	if err != nil {
		t.Fatalf("expected stream fallback to succeed, got %v", err)
	}
	// Exactly two round-trips: the rejected tools request plus the fallback.
	if requestCount != 2 {
		t.Fatalf("expected 2 requests (with tools then fallback), got %d", requestCount)
	}
	if len(chunks) < 2 {
		t.Fatalf("expected content and done chunks, got %#v", chunks)
	}
	if chunks[0].Content != "pong" {
		t.Fatalf("expected first chunk content %q, got %#v", "pong", chunks[0])
	}
	if !chunks[len(chunks)-1].Done {
		t.Fatalf("expected final done chunk, got %#v", chunks[len(chunks)-1])
	}
}