1
0

refactor: 优化 URL 路径拼接,修复 /v1 重复问题

## 主要变更

**核心修改**:
- 路由定义:/:protocol/v1/*path → /:protocol/*path
- proxy_handler:nativePath 直接使用 path 参数,不添加 /v1 前缀
- OpenAI 适配器:DetectInterfaceType 和 BuildUrl 去掉 /v1 前缀
- Anthropic 适配器:保持 /v1 前缀(Claude Code 兼容)
- 行为变化:旧的无协议前缀路由(如 /v1/chat/completions、/v1/messages)现在会匹配 /:protocol/*path 通配路由,返回 400(不支持的协议)而非 404

**URL 格式变化**:
- OpenAI: /openai/v1/chat/completions → /openai/chat/completions
- Anthropic: /anthropic/v1/messages (保持不变)

**base_url 配置**:
- OpenAI: 配置到版本路径,如 https://api.openai.com/v1
- Anthropic: 不配置版本路径,如 https://api.anthropic.com

## 测试验证

- 所有单元测试通过
- 所有集成测试通过
- 真实 API 测试验证成功
- 跨协议转换正常工作

## 文档更新

- 更新 backend/README.md URL 格式说明
- 同步 OpenSpec 规范文件
This commit is contained in:
2026-04-21 20:21:17 +08:00
parent 24f03595a7
commit b7e205f4b6
16 changed files with 193 additions and 145 deletions

View File

@@ -77,7 +77,7 @@ func setupConversionTest(t *testing.T) (*gin.Engine, *gorm.DB, *httptest.Server)
r.Use(middleware.CORS())
// 代理路由
r.Any("/:protocol/v1/*path", proxyHandler.HandleProxy)
r.Any("/:protocol/*path", proxyHandler.HandleProxy)
// 管理路由
providers := r.Group("/api/providers")
@@ -177,7 +177,7 @@ func TestConversion_OpenAIToAnthropic_NonStream(t *testing.T) {
body, _ := json.Marshal(openaiReq)
w := httptest.NewRecorder()
req := httptest.NewRequest("POST", "/openai/v1/chat/completions", bytes.NewReader(body))
req := httptest.NewRequest("POST", "/openai/chat/completions", bytes.NewReader(body))
req.Header.Set("Content-Type", "application/json")
r.ServeHTTP(w, req)
@@ -202,7 +202,7 @@ func TestConversion_AnthropicToOpenAI_NonStream(t *testing.T) {
var req map[string]any
json.Unmarshal(body, &req)
assert.Equal(t, "/v1/chat/completions", r.URL.Path)
assert.Equal(t, "/chat/completions", r.URL.Path)
assert.Contains(t, r.Header.Get("Authorization"), "Bearer test-key")
resp := map[string]any{
@@ -262,7 +262,7 @@ func TestConversion_OpenAIToOpenAI_Passthrough(t *testing.T) {
r, _, upstream := setupConversionTest(t)
upstream.Config.Handler = http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
assert.Equal(t, "/v1/chat/completions", r.URL.Path)
assert.Equal(t, "/chat/completions", r.URL.Path)
body, _ := io.ReadAll(r.Body)
var req map[string]any
@@ -284,7 +284,7 @@ func TestConversion_OpenAIToOpenAI_Passthrough(t *testing.T) {
body, _ := json.Marshal(reqBody)
w := httptest.NewRecorder()
req := httptest.NewRequest("POST", "/openai/v1/chat/completions", bytes.NewReader(body))
req := httptest.NewRequest("POST", "/openai/chat/completions", bytes.NewReader(body))
req.Header.Set("Content-Type", "application/json")
r.ServeHTTP(w, req)
@@ -364,7 +364,7 @@ func TestConversion_OpenAIToAnthropic_Stream(t *testing.T) {
body, _ := json.Marshal(openaiReq)
w := httptest.NewRecorder()
req := httptest.NewRequest("POST", "/openai/v1/chat/completions", bytes.NewReader(body))
req := httptest.NewRequest("POST", "/openai/chat/completions", bytes.NewReader(body))
req.Header.Set("Content-Type", "application/json")
r.ServeHTTP(w, req)
@@ -483,7 +483,7 @@ func TestConversion_ErrorResponse_Format(t *testing.T) {
// OpenAI 协议格式
w := httptest.NewRecorder()
req := httptest.NewRequest("POST", "/openai/v1/chat/completions", bytes.NewReader(body))
req := httptest.NewRequest("POST", "/openai/chat/completions", bytes.NewReader(body))
req.Header.Set("Content-Type", "application/json")
r.ServeHTTP(w, req)
assert.True(t, w.Code >= 400)
@@ -499,15 +499,15 @@ func TestConversion_OldRoutes_Return404(t *testing.T) {
req := httptest.NewRequest("POST", "/v1/chat/completions", strings.NewReader(`{"model":"test"}`))
req.Header.Set("Content-Type", "application/json")
r.ServeHTTP(w, req)
// Gin 路由匹配返回 404
assert.Equal(t, 404, w.Code)
// Gin 路由匹配但协议不支持返回 400
assert.Equal(t, 400, w.Code)
// 旧 Anthropic 路由
w = httptest.NewRecorder()
req = httptest.NewRequest("POST", "/v1/messages", strings.NewReader(`{"model":"test"}`))
req.Header.Set("Content-Type", "application/json")
r.ServeHTTP(w, req)
assert.Equal(t, 404, w.Code)
assert.Equal(t, 400, w.Code)
}
// ============ Provider Protocol 字段测试 ============

View File

@@ -67,7 +67,7 @@ func setupE2ETest(t *testing.T) (*gin.Engine, *httptest.Server) {
r := gin.New()
r.Use(middleware.CORS())
r.Any("/:protocol/v1/*path", proxyHandler.HandleProxy)
r.Any("/:protocol/*path", proxyHandler.HandleProxy)
providers := r.Group("/api/providers")
{
@@ -150,7 +150,7 @@ func parseOpenAIStreamChunks(body string) []string {
func TestE2E_OpenAI_NonStream_BasicText(t *testing.T) {
r, upstream := setupE2ETest(t)
upstream.Config.Handler = http.HandlerFunc(func(w http.ResponseWriter, req *http.Request) {
assert.Equal(t, "/v1/chat/completions", req.URL.Path)
assert.Equal(t, "/chat/completions", req.URL.Path)
w.Header().Set("Content-Type", "application/json")
json.NewEncoder(w).Encode(map[string]any{
"id": "chatcmpl-e2e-001",
@@ -177,7 +177,7 @@ func TestE2E_OpenAI_NonStream_BasicText(t *testing.T) {
},
})
w := httptest.NewRecorder()
req := httptest.NewRequest("POST", "/openai/v1/chat/completions", bytes.NewReader(body))
req := httptest.NewRequest("POST", "/openai/chat/completions", bytes.NewReader(body))
req.Header.Set("Content-Type", "application/json")
r.ServeHTTP(w, req)
@@ -233,7 +233,7 @@ func TestE2E_OpenAI_NonStream_MultiTurn(t *testing.T) {
},
})
w := httptest.NewRecorder()
req := httptest.NewRequest("POST", "/openai/v1/chat/completions", bytes.NewReader(body))
req := httptest.NewRequest("POST", "/openai/chat/completions", bytes.NewReader(body))
req.Header.Set("Content-Type", "application/json")
r.ServeHTTP(w, req)
@@ -290,7 +290,7 @@ func TestE2E_OpenAI_NonStream_ToolCalls(t *testing.T) {
"tool_choice": "auto",
})
w := httptest.NewRecorder()
req := httptest.NewRequest("POST", "/openai/v1/chat/completions", bytes.NewReader(body))
req := httptest.NewRequest("POST", "/openai/chat/completions", bytes.NewReader(body))
req.Header.Set("Content-Type", "application/json")
r.ServeHTTP(w, req)
@@ -333,7 +333,7 @@ func TestE2E_OpenAI_NonStream_MaxTokens_Length(t *testing.T) {
"max_tokens": 30,
})
w := httptest.NewRecorder()
req := httptest.NewRequest("POST", "/openai/v1/chat/completions", bytes.NewReader(body))
req := httptest.NewRequest("POST", "/openai/chat/completions", bytes.NewReader(body))
req.Header.Set("Content-Type", "application/json")
r.ServeHTTP(w, req)
@@ -369,7 +369,7 @@ func TestE2E_OpenAI_NonStream_UsageWithReasoning(t *testing.T) {
"messages": []map[string]any{{"role": "user", "content": "15+23*2=?"}},
})
w := httptest.NewRecorder()
req := httptest.NewRequest("POST", "/openai/v1/chat/completions", bytes.NewReader(body))
req := httptest.NewRequest("POST", "/openai/chat/completions", bytes.NewReader(body))
req.Header.Set("Content-Type", "application/json")
r.ServeHTTP(w, req)
@@ -410,7 +410,7 @@ func TestE2E_OpenAI_NonStream_Refusal(t *testing.T) {
"messages": []map[string]any{{"role": "user", "content": "做坏事"}},
})
w := httptest.NewRecorder()
req := httptest.NewRequest("POST", "/openai/v1/chat/completions", bytes.NewReader(body))
req := httptest.NewRequest("POST", "/openai/chat/completions", bytes.NewReader(body))
req.Header.Set("Content-Type", "application/json")
r.ServeHTTP(w, req)
@@ -453,7 +453,7 @@ func TestE2E_OpenAI_Stream_Text(t *testing.T) {
"stream": true,
})
w := httptest.NewRecorder()
req := httptest.NewRequest("POST", "/openai/v1/chat/completions", bytes.NewReader(body))
req := httptest.NewRequest("POST", "/openai/chat/completions", bytes.NewReader(body))
req.Header.Set("Content-Type", "application/json")
r.ServeHTTP(w, req)
@@ -507,7 +507,7 @@ func TestE2E_OpenAI_Stream_ToolCalls(t *testing.T) {
"stream": true,
})
w := httptest.NewRecorder()
req := httptest.NewRequest("POST", "/openai/v1/chat/completions", bytes.NewReader(body))
req := httptest.NewRequest("POST", "/openai/chat/completions", bytes.NewReader(body))
req.Header.Set("Content-Type", "application/json")
r.ServeHTTP(w, req)
@@ -546,7 +546,7 @@ func TestE2E_OpenAI_Stream_WithUsage(t *testing.T) {
"stream": true,
})
w := httptest.NewRecorder()
req := httptest.NewRequest("POST", "/openai/v1/chat/completions", bytes.NewReader(body))
req := httptest.NewRequest("POST", "/openai/chat/completions", bytes.NewReader(body))
req.Header.Set("Content-Type", "application/json")
r.ServeHTTP(w, req)
@@ -967,7 +967,7 @@ func TestE2E_CrossProtocol_OpenAIToAnthropic_RequestFormat(t *testing.T) {
"messages": []map[string]any{{"role": "user", "content": "Hello"}},
})
w := httptest.NewRecorder()
req := httptest.NewRequest("POST", "/openai/v1/chat/completions", bytes.NewReader(body))
req := httptest.NewRequest("POST", "/openai/chat/completions", bytes.NewReader(body))
req.Header.Set("Content-Type", "application/json")
r.ServeHTTP(w, req)
@@ -982,7 +982,7 @@ func TestE2E_CrossProtocol_OpenAIToAnthropic_RequestFormat(t *testing.T) {
func TestE2E_CrossProtocol_AnthropicToOpenAI_RequestFormat(t *testing.T) {
r, upstream := setupE2ETest(t)
upstream.Config.Handler = http.HandlerFunc(func(w http.ResponseWriter, req *http.Request) {
assert.Equal(t, "/v1/chat/completions", req.URL.Path)
assert.Equal(t, "/chat/completions", req.URL.Path)
body, _ := io.ReadAll(req.Body)
var reqBody map[string]any
require.NoError(t, json.Unmarshal(body, &reqBody))
@@ -1050,7 +1050,7 @@ func TestE2E_CrossProtocol_OpenAIToAnthropic_Stream(t *testing.T) {
"stream": true,
})
w := httptest.NewRecorder()
req := httptest.NewRequest("POST", "/openai/v1/chat/completions", bytes.NewReader(body))
req := httptest.NewRequest("POST", "/openai/chat/completions", bytes.NewReader(body))
req.Header.Set("Content-Type", "application/json")
r.ServeHTTP(w, req)
@@ -1065,7 +1065,7 @@ func TestE2E_CrossProtocol_OpenAIToAnthropic_Stream(t *testing.T) {
func TestE2E_CrossProtocol_AnthropicToOpenAI_Stream(t *testing.T) {
r, upstream := setupE2ETest(t)
upstream.Config.Handler = http.HandlerFunc(func(w http.ResponseWriter, req *http.Request) {
assert.Equal(t, "/v1/chat/completions", req.URL.Path)
assert.Equal(t, "/chat/completions", req.URL.Path)
w.Header().Set("Content-Type", "text/event-stream")
w.WriteHeader(http.StatusOK)
flusher := w.(http.Flusher)
@@ -1127,7 +1127,7 @@ func TestE2E_OpenAI_ErrorResponse(t *testing.T) {
"messages": []map[string]any{{"role": "user", "content": "test"}},
})
w := httptest.NewRecorder()
req := httptest.NewRequest("POST", "/openai/v1/chat/completions", bytes.NewReader(body))
req := httptest.NewRequest("POST", "/openai/chat/completions", bytes.NewReader(body))
req.Header.Set("Content-Type", "application/json")
r.ServeHTTP(w, req)
@@ -1212,7 +1212,7 @@ func TestE2E_OpenAI_NonStream_ParallelToolCalls(t *testing.T) {
"tool_choice": "auto",
})
w := httptest.NewRecorder()
req := httptest.NewRequest("POST", "/openai/v1/chat/completions", bytes.NewReader(body))
req := httptest.NewRequest("POST", "/openai/chat/completions", bytes.NewReader(body))
req.Header.Set("Content-Type", "application/json")
r.ServeHTTP(w, req)
@@ -1253,7 +1253,7 @@ func TestE2E_OpenAI_NonStream_StopSequence(t *testing.T) {
"stop": []string{"5"},
})
w := httptest.NewRecorder()
req := httptest.NewRequest("POST", "/openai/v1/chat/completions", bytes.NewReader(body))
req := httptest.NewRequest("POST", "/openai/chat/completions", bytes.NewReader(body))
req.Header.Set("Content-Type", "application/json")
r.ServeHTTP(w, req)
@@ -1290,7 +1290,7 @@ func TestE2E_OpenAI_NonStream_ContentFilter(t *testing.T) {
"messages": []map[string]any{{"role": "user", "content": "危险内容"}},
})
w := httptest.NewRecorder()
req := httptest.NewRequest("POST", "/openai/v1/chat/completions", bytes.NewReader(body))
req := httptest.NewRequest("POST", "/openai/chat/completions", bytes.NewReader(body))
req.Header.Set("Content-Type", "application/json")
r.ServeHTTP(w, req)
@@ -1569,7 +1569,7 @@ func TestE2E_CrossProtocol_OpenAIToAnthropic_NonStream_ToolCalls(t *testing.T) {
}},
})
w := httptest.NewRecorder()
req := httptest.NewRequest("POST", "/openai/v1/chat/completions", bytes.NewReader(body))
req := httptest.NewRequest("POST", "/openai/chat/completions", bytes.NewReader(body))
req.Header.Set("Content-Type", "application/json")
r.ServeHTTP(w, req)
@@ -1588,7 +1588,7 @@ func TestE2E_CrossProtocol_OpenAIToAnthropic_NonStream_ToolCalls(t *testing.T) {
func TestE2E_CrossProtocol_AnthropicToOpenAI_NonStream_Thinking(t *testing.T) {
r, upstream := setupE2ETest(t)
upstream.Config.Handler = http.HandlerFunc(func(w http.ResponseWriter, req *http.Request) {
assert.Equal(t, "/v1/chat/completions", req.URL.Path)
assert.Equal(t, "/chat/completions", req.URL.Path)
w.Header().Set("Content-Type", "application/json")
json.NewEncoder(w).Encode(map[string]any{
"id": "chatcmpl-cross-think", "object": "chat.completion", "model": "gpt-4", "created": time.Now().Unix(),
@@ -1640,7 +1640,7 @@ func TestE2E_CrossProtocol_StopReasonMapping(t *testing.T) {
"messages": []map[string]any{{"role": "user", "content": "长文"}},
})
w := httptest.NewRecorder()
req := httptest.NewRequest("POST", "/openai/v1/chat/completions", bytes.NewReader(body))
req := httptest.NewRequest("POST", "/openai/chat/completions", bytes.NewReader(body))
req.Header.Set("Content-Type", "application/json")
r.ServeHTTP(w, req)
@@ -1689,7 +1689,7 @@ func TestE2E_OpenAI_NonStream_AssistantWithToolResult(t *testing.T) {
},
})
w := httptest.NewRecorder()
req := httptest.NewRequest("POST", "/openai/v1/chat/completions", bytes.NewReader(body))
req := httptest.NewRequest("POST", "/openai/chat/completions", bytes.NewReader(body))
req.Header.Set("Content-Type", "application/json")
r.ServeHTTP(w, req)
@@ -1740,7 +1740,7 @@ func TestE2E_CrossProtocol_AnthropicToOpenAI_Stream_ToolCalls(t *testing.T) {
"stream": true,
})
w := httptest.NewRecorder()
req := httptest.NewRequest("POST", "/openai/v1/chat/completions", bytes.NewReader(body))
req := httptest.NewRequest("POST", "/openai/chat/completions", bytes.NewReader(body))
req.Header.Set("Content-Type", "application/json")
r.ServeHTTP(w, req)
@@ -1753,7 +1753,7 @@ func TestE2E_CrossProtocol_AnthropicToOpenAI_Stream_ToolCalls(t *testing.T) {
func TestE2E_CrossProtocol_OpenAIToAnthropic_Stream_ToolCalls(t *testing.T) {
r, upstream := setupE2ETest(t)
upstream.Config.Handler = http.HandlerFunc(func(w http.ResponseWriter, req *http.Request) {
assert.Equal(t, "/v1/chat/completions", req.URL.Path)
assert.Equal(t, "/chat/completions", req.URL.Path)
w.Header().Set("Content-Type", "text/event-stream")
w.WriteHeader(http.StatusOK)
flusher := w.(http.Flusher)
@@ -1816,7 +1816,7 @@ func TestE2E_OpenAI_Upstream5xx_ErrorPassthrough(t *testing.T) {
"messages": []map[string]any{{"role": "user", "content": "test"}},
})
w := httptest.NewRecorder()
req := httptest.NewRequest("POST", "/openai/v1/chat/completions", bytes.NewReader(body))
req := httptest.NewRequest("POST", "/openai/chat/completions", bytes.NewReader(body))
req.Header.Set("Content-Type", "application/json")
r.ServeHTTP(w, req)