1
0
Files
nex/backend/tests/integration/conversion_test.go
lanyuanxiaoyao d92db73937 refactor: 后端代码质量优化 - 复用公共库、使用标准库、类型安全错误判断
## 高优先级修复
- stats_service_impl: 使用 strings.SplitN 替代错误的索引分割
- provider_handler: 使用 errors.Is(err, gorm.ErrDuplicatedKey) 替代字符串匹配
- client: 重写 isNetworkError 使用 errors.As/Is 类型安全判断
- proxy_handler: 使用 encoding/json 标准库解析 JSON(extractModelName、isStreamRequest)

## 中优先级修复
- stats_handler: 添加 parseDateParam 辅助函数消除重复日期解析
- pkg/errors: 新增 ErrRequestCreate/Send/ResponseRead 错误类型和 WithCause 方法
- client: 使用结构化错误替代 fmt.Errorf
- ConversionEngine: logger 依赖注入,替换所有 zap.L() 调用

## 低优先级修复
- encoder: 删除 joinStrings,使用 strings.Join
- adapter: 删除 modelInfoRegex 正则,使用 isModelInfoPath 字符串函数

## 文档更新
- README.md: 添加公共库使用指南和编码规范章节
- specs: 同步 delta specs 到 main specs(error-handling、structured-logging、request-validation)

## 归档
- openspec/changes/archive/2026-04-20-refactor-backend-code-quality/
2026-04-20 16:42:48 +08:00

572 lines
18 KiB
Go
Raw Permalink Blame History

This file contains ambiguous Unicode characters
This file contains Unicode characters that might be confused with other characters. If you think that this is intentional, you can safely ignore this warning. Use the Escape button to reveal them.
package integration
import (
"bytes"
"encoding/json"
"fmt"
"io"
"net/http"
"net/http/httptest"
"strings"
"testing"
"time"
"github.com/gin-gonic/gin"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
"gorm.io/driver/sqlite"
"gorm.io/gorm"
"nex/backend/internal/config"
"nex/backend/internal/conversion"
"nex/backend/internal/conversion/anthropic"
openaiConv "nex/backend/internal/conversion/openai"
"nex/backend/internal/handler"
"nex/backend/internal/handler/middleware"
"nex/backend/internal/provider"
"nex/backend/internal/repository"
"nex/backend/internal/service"
)
// init puts Gin into test mode before any test runs, suppressing the
// debug-mode router logging during test execution.
func init() {
gin.SetMode(gin.TestMode)
}
// setupConversionTest builds a complete test environment around a
// ConversionEngine: a mock upstream HTTP server (each test overrides
// upstream.Config.Handler), a temp-file SQLite database with the repository
// and service stack migrated on top of it, both protocol adapters registered
// in the conversion registry, and a Gin router exposing the proxy route plus
// the provider/model admin API. Cleanup of the DB and upstream server is
// registered on t.
func setupConversionTest(t *testing.T) (*gin.Engine, *gorm.DB, *httptest.Server) {
	t.Helper()
	// Mock upstream server; the default handler is a sentinel that tests
	// are expected to replace.
	upstream := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
		w.WriteHeader(http.StatusOK)
		w.Write([]byte(`{"error":"not mocked"}`))
	}))
	dir := t.TempDir()
	db, err := gorm.Open(sqlite.Open(dir+"/test.db"), &gorm.Config{})
	require.NoError(t, err)
	err = db.AutoMigrate(&config.Provider{}, &config.Model{}, &config.UsageStats{})
	require.NoError(t, err)
	t.Cleanup(func() {
		sqlDB, _ := db.DB()
		if sqlDB != nil {
			sqlDB.Close()
		}
		upstream.Close()
	})
	providerRepo := repository.NewProviderRepository(db)
	modelRepo := repository.NewModelRepository(db)
	statsRepo := repository.NewStatsRepository(db)
	providerService := service.NewProviderService(providerRepo)
	modelService := service.NewModelService(modelRepo, providerRepo)
	routingService := service.NewRoutingService(modelRepo, providerRepo)
	statsService := service.NewStatsService(statsRepo)
	// ConversionEngine with both protocol adapters registered.
	registry := conversion.NewMemoryRegistry()
	require.NoError(t, registry.Register(openaiConv.NewAdapter()))
	require.NoError(t, registry.Register(anthropic.NewAdapter()))
	engine := conversion.NewConversionEngine(registry, nil)
	providerClient := provider.NewClient()
	proxyHandler := handler.NewProxyHandler(engine, providerClient, routingService, providerService, statsService)
	providerHandler := handler.NewProviderHandler(providerService)
	modelHandler := handler.NewModelHandler(modelService)
	statsHandler := handler.NewStatsHandler(statsService)
	// NOTE: the redundant `_ = modelService` suppressor was removed —
	// modelService is genuinely used by handler.NewModelHandler above.
	r := gin.New()
	r.Use(middleware.CORS())
	// Proxy route: protocol prefix selects the client-side wire format.
	r.Any("/:protocol/v1/*path", proxyHandler.HandleProxy)
	// Admin routes.
	providers := r.Group("/api/providers")
	{
		providers.GET("", providerHandler.ListProviders)
		providers.POST("", providerHandler.CreateProvider)
		providers.GET("/:id", providerHandler.GetProvider)
		providers.PUT("/:id", providerHandler.UpdateProvider)
		providers.DELETE("/:id", providerHandler.DeleteProvider)
	}
	models := r.Group("/api/models")
	{
		models.GET("", modelHandler.ListModels)
		models.POST("", modelHandler.CreateModel)
		models.GET("/:id", modelHandler.GetModel)
		models.PUT("/:id", modelHandler.UpdateModel)
		models.DELETE("/:id", modelHandler.DeleteModel)
	}
	// statsHandler has no routes wired in this suite; the blank assignment
	// keeps the variable referenced so the file compiles.
	_ = statsHandler
	return r, db, upstream
}
// createProviderAndModel registers one provider and one model for it through
// the admin API, failing the test immediately if either POST does not return
// HTTP 201.
func createProviderAndModel(t *testing.T, r *gin.Engine, providerID, protocol, modelName string, upstreamURL string) {
	t.Helper()
	postJSON := func(path string, payload map[string]string) {
		t.Helper()
		raw, _ := json.Marshal(payload)
		rec := httptest.NewRecorder()
		httpReq := httptest.NewRequest("POST", path, bytes.NewReader(raw))
		httpReq.Header.Set("Content-Type", "application/json")
		r.ServeHTTP(rec, httpReq)
		require.Equal(t, 201, rec.Code)
	}
	postJSON("/api/providers", map[string]string{
		"id":       providerID,
		"name":     providerID,
		"api_key":  "test-key",
		"base_url": upstreamURL,
		"protocol": protocol,
	})
	postJSON("/api/models", map[string]string{
		"id":          modelName,
		"provider_id": providerID,
		"model_name":  modelName,
	})
}
// ============ 跨协议非流式转换测试 ============
// TestConversion_OpenAIToAnthropic_NonStream verifies that an OpenAI-format
// client request is converted to the Anthropic wire format upstream
// (/v1/messages) and that the Anthropic response comes back to the client as
// an OpenAI chat.completion.
func TestConversion_OpenAIToAnthropic_NonStream(t *testing.T) {
	r, _, upstream := setupConversionTest(t)
	// Upstream returns an Anthropic-format response.
	upstream.Config.Handler = http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
		body, _ := io.ReadAll(r.Body)
		var req map[string]any
		json.Unmarshal(body, &req)
		// Fix: req was previously decoded but never checked. Assert the
		// converted request still carries a messages array.
		assert.NotEmpty(t, req["messages"])
		assert.Equal(t, "/v1/messages", r.URL.Path)
		assert.Contains(t, r.Header.Get("Content-Type"), "application/json")
		// Canned Anthropic response.
		resp := map[string]any{
			"id":    "msg_test",
			"type":  "message",
			"role":  "assistant",
			"model": "claude-3-opus",
			"content": []map[string]any{
				{"type": "text", "text": "Hello from Anthropic!"},
			},
			"stop_reason": "end_turn",
			"usage": map[string]any{
				"input_tokens":  10,
				"output_tokens": 20,
			},
		}
		w.Header().Set("Content-Type", "application/json")
		json.NewEncoder(w).Encode(resp)
	})
	createProviderAndModel(t, r, "anthropic-p", "anthropic", "claude-3-opus", upstream.URL)
	// Client speaks OpenAI protocol.
	openaiReq := map[string]any{
		"model": "claude-3-opus",
		"messages": []map[string]any{
			{"role": "user", "content": "Hello"},
		},
		"stream": false,
	}
	body, _ := json.Marshal(openaiReq)
	w := httptest.NewRecorder()
	req := httptest.NewRequest("POST", "/openai/v1/chat/completions", bytes.NewReader(body))
	req.Header.Set("Content-Type", "application/json")
	r.ServeHTTP(w, req)
	assert.Equal(t, 200, w.Code)
	var resp map[string]any
	json.Unmarshal(w.Body.Bytes(), &resp)
	assert.Equal(t, "chat.completion", resp["object"])
	choices := resp["choices"].([]any)
	require.Len(t, choices, 1)
	choice := choices[0].(map[string]any)
	msg := choice["message"].(map[string]any)
	assert.Contains(t, msg["content"], "Hello from Anthropic!")
}
// TestConversion_AnthropicToOpenAI_NonStream verifies that an Anthropic-format
// client request is converted to the OpenAI wire format upstream
// (/v1/chat/completions, Bearer auth) and that the OpenAI response comes back
// as an Anthropic message.
func TestConversion_AnthropicToOpenAI_NonStream(t *testing.T) {
	r, _, upstream := setupConversionTest(t)
	upstream.Config.Handler = http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
		body, _ := io.ReadAll(r.Body)
		var req map[string]any
		json.Unmarshal(body, &req)
		// Fix: req was previously decoded but never checked. Assert the
		// converted request still carries a messages array.
		assert.NotEmpty(t, req["messages"])
		assert.Equal(t, "/v1/chat/completions", r.URL.Path)
		assert.Contains(t, r.Header.Get("Authorization"), "Bearer test-key")
		resp := map[string]any{
			"id":      "chatcmpl-test",
			"object":  "chat.completion",
			"model":   "gpt-4",
			"created": time.Now().Unix(),
			"choices": []map[string]any{
				{
					"index":         0,
					"message":       map[string]any{"role": "assistant", "content": "Hello from OpenAI!"},
					"finish_reason": "stop",
				},
			},
			"usage": map[string]any{
				"prompt_tokens":     10,
				"completion_tokens": 20,
				"total_tokens":      30,
			},
		}
		w.Header().Set("Content-Type", "application/json")
		json.NewEncoder(w).Encode(resp)
	})
	createProviderAndModel(t, r, "openai-p", "openai", "gpt-4", upstream.URL)
	anthropicReq := map[string]any{
		"model":      "gpt-4",
		"max_tokens": 1024,
		"messages": []map[string]any{
			{"role": "user", "content": "Hello"},
		},
		"stream": false,
	}
	body, _ := json.Marshal(anthropicReq)
	w := httptest.NewRecorder()
	req := httptest.NewRequest("POST", "/anthropic/v1/messages", bytes.NewReader(body))
	req.Header.Set("Content-Type", "application/json")
	r.ServeHTTP(w, req)
	assert.Equal(t, 200, w.Code)
	var resp map[string]any
	json.Unmarshal(w.Body.Bytes(), &resp)
	assert.Equal(t, "message", resp["type"])
	content := resp["content"].([]any)
	require.Len(t, content, 1)
	block := content[0].(map[string]any)
	assert.Contains(t, block["text"], "Hello from OpenAI!")
}
// ============ 同协议透传测试 ============
// TestConversion_OpenAIToOpenAI_Passthrough verifies that when the client
// protocol matches the provider protocol, the request reaches the upstream
// endpoint unmodified and the response is returned as-is.
func TestConversion_OpenAIToOpenAI_Passthrough(t *testing.T) {
	router, _, upstream := setupConversionTest(t)
	upstream.Config.Handler = http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
		assert.Equal(t, "/v1/chat/completions", r.URL.Path)
		raw, _ := io.ReadAll(r.Body)
		var decoded map[string]any
		json.Unmarshal(raw, &decoded)
		assert.Equal(t, "gpt-4", decoded["model"])
		w.Header().Set("Content-Type", "application/json")
		w.Write([]byte(`{"id":"chatcmpl-pass","object":"chat.completion","model":"gpt-4","choices":[{"index":0,"message":{"role":"assistant","content":"passthrough"},"finish_reason":"stop"}],"usage":{"prompt_tokens":5,"completion_tokens":1,"total_tokens":6}}`))
	})
	createProviderAndModel(t, router, "openai-p", "openai", "gpt-4", upstream.URL)
	payload, _ := json.Marshal(map[string]any{
		"model":    "gpt-4",
		"messages": []map[string]any{{"role": "user", "content": "test"}},
	})
	rec := httptest.NewRecorder()
	req := httptest.NewRequest("POST", "/openai/v1/chat/completions", bytes.NewReader(payload))
	req.Header.Set("Content-Type", "application/json")
	router.ServeHTTP(rec, req)
	assert.Equal(t, 200, rec.Code)
	assert.Contains(t, rec.Body.String(), "passthrough")
}
// TestConversion_AnthropicToAnthropic_Passthrough verifies same-protocol
// passthrough on the Anthropic route: the upstream sees /v1/messages and the
// canned response body reaches the client untouched.
func TestConversion_AnthropicToAnthropic_Passthrough(t *testing.T) {
	router, _, upstream := setupConversionTest(t)
	upstream.Config.Handler = http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
		assert.Equal(t, "/v1/messages", r.URL.Path)
		w.Header().Set("Content-Type", "application/json")
		w.Write([]byte(`{"id":"msg-pass","type":"message","role":"assistant","model":"claude-3-opus","content":[{"type":"text","text":"passthrough"}],"stop_reason":"end_turn","usage":{"input_tokens":5,"output_tokens":1}}`))
	})
	createProviderAndModel(t, router, "anthropic-p", "anthropic", "claude-3-opus", upstream.URL)
	payload, _ := json.Marshal(map[string]any{
		"model":      "claude-3-opus",
		"max_tokens": 1024,
		"messages":   []map[string]any{{"role": "user", "content": "test"}},
	})
	rec := httptest.NewRecorder()
	req := httptest.NewRequest("POST", "/anthropic/v1/messages", bytes.NewReader(payload))
	req.Header.Set("Content-Type", "application/json")
	router.ServeHTTP(rec, req)
	assert.Equal(t, 200, rec.Code)
	assert.Contains(t, rec.Body.String(), "passthrough")
}
// ============ 流式转换测试 ============
// TestConversion_OpenAIToAnthropic_Stream verifies that an Anthropic SSE event
// stream from the upstream is delivered to an OpenAI-protocol client as an
// event stream (status 200, text/event-stream content type).
func TestConversion_OpenAIToAnthropic_Stream(t *testing.T) {
	router, _, upstream := setupConversionTest(t)
	upstream.Config.Handler = http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
		w.Header().Set("Content-Type", "text/event-stream")
		w.WriteHeader(http.StatusOK)
		// Emit one event at a time, flushing between writes when supported.
		writeSSE := func(event string) {
			w.Write([]byte(event))
			if flusher, ok := w.(http.Flusher); ok {
				flusher.Flush()
			}
		}
		writeSSE("event: message_start\ndata: {\"type\":\"message_start\",\"message\":{\"id\":\"msg_stream\",\"model\":\"claude-3-opus\",\"usage\":{\"input_tokens\":10,\"output_tokens\":0}}}\n\n")
		writeSSE("event: content_block_start\ndata: {\"type\":\"content_block_start\",\"index\":0,\"content_block\":{\"type\":\"text\",\"text\":\"\"}}\n\n")
		writeSSE("event: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"Hi\"}}\n\n")
		writeSSE("event: content_block_stop\ndata: {\"type\":\"content_block_stop\",\"index\":0}\n\n")
		writeSSE("event: message_delta\ndata: {\"type\":\"message_delta\",\"delta\":{\"stop_reason\":\"end_turn\"},\"usage\":{\"output_tokens\":5}}\n\n")
		writeSSE("event: message_stop\ndata: {\"type\":\"message_stop\"}\n\n")
	})
	createProviderAndModel(t, router, "anthropic-p", "anthropic", "claude-3-opus", upstream.URL)
	payload, _ := json.Marshal(map[string]any{
		"model":    "claude-3-opus",
		"messages": []map[string]any{{"role": "user", "content": "Hello"}},
		"stream":   true,
	})
	rec := httptest.NewRecorder()
	req := httptest.NewRequest("POST", "/openai/v1/chat/completions", bytes.NewReader(payload))
	req.Header.Set("Content-Type", "application/json")
	router.ServeHTTP(rec, req)
	assert.Equal(t, 200, rec.Code)
	assert.Contains(t, rec.Header().Get("Content-Type"), "text/event-stream")
}
// TestConversion_AnthropicToOpenAI_Stream verifies that an OpenAI SSE chunk
// stream from the upstream is delivered to an Anthropic-protocol client as an
// event stream (status 200, text/event-stream content type).
func TestConversion_AnthropicToOpenAI_Stream(t *testing.T) {
	r, _, upstream := setupConversionTest(t)
	upstream.Config.Handler = http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
		w.Header().Set("Content-Type", "text/event-stream")
		w.WriteHeader(http.StatusOK)
		// Fix: the first event was wrapped in fmt.Sprintf with no format
		// directives (staticcheck S1039); a plain string literal is identical.
		events := []string{
			"data: {\"id\":\"chatcmpl-s\",\"object\":\"chat.completion.chunk\",\"model\":\"gpt-4\",\"choices\":[{\"index\":0,\"delta\":{\"role\":\"assistant\"}}]}\n\n",
			"data: {\"id\":\"chatcmpl-s\",\"object\":\"chat.completion.chunk\",\"model\":\"gpt-4\",\"choices\":[{\"index\":0,\"delta\":{\"content\":\"Hey\"}}]}\n\n",
			"data: {\"id\":\"chatcmpl-s\",\"object\":\"chat.completion.chunk\",\"model\":\"gpt-4\",\"choices\":[{\"index\":0,\"delta\":{\"content\":\"!\"}}]}\n\n",
			"data: {\"id\":\"chatcmpl-s\",\"object\":\"chat.completion.chunk\",\"model\":\"gpt-4\",\"choices\":[{\"index\":0,\"delta\":{},\"finish_reason\":\"stop\"}]}\n\n",
			"data: [DONE]\n\n",
		}
		for _, e := range events {
			w.Write([]byte(e))
			if f, ok := w.(http.Flusher); ok {
				f.Flush()
			}
		}
	})
	createProviderAndModel(t, r, "openai-p", "openai", "gpt-4", upstream.URL)
	anthropicReq := map[string]any{
		"model":      "gpt-4",
		"max_tokens": 1024,
		"messages":   []map[string]any{{"role": "user", "content": "Hello"}},
		"stream":     true,
	}
	body, _ := json.Marshal(anthropicReq)
	w := httptest.NewRecorder()
	req := httptest.NewRequest("POST", "/anthropic/v1/messages", bytes.NewReader(body))
	req.Header.Set("Content-Type", "application/json")
	r.ServeHTTP(w, req)
	assert.Equal(t, 200, w.Code)
	ct := w.Header().Get("Content-Type")
	assert.Contains(t, ct, "text/event-stream")
}
// ============ Models 接口测试 ============
// TestConversion_Models_CrossProtocol exercises the adapter-level codec for
// the models listing: OpenAI decode → canonical → Anthropic encode, and the
// reverse direction. (GET /models has no body, so this bypasses the router.)
func TestConversion_Models_CrossProtocol(t *testing.T) {
	registry := conversion.NewMemoryRegistry()
	require.NoError(t, registry.Register(openaiConv.NewAdapter()))
	require.NoError(t, registry.Register(anthropic.NewAdapter()))
	openaiAdapter, _ := registry.Get("openai")
	anthropicAdapter, _ := registry.Get("anthropic")
	// OpenAI-format models response.
	openaiModelsBody := []byte(`{"object":"list","data":[{"id":"gpt-4","object":"model","created":1700000000,"owned_by":"openai"},{"id":"gpt-3.5-turbo","object":"model","created":1700000001,"owned_by":"openai"}]}`)
	// OpenAI decode → canonical.
	modelList, err := openaiAdapter.DecodeModelsResponse(openaiModelsBody)
	require.NoError(t, err)
	assert.Len(t, modelList.Models, 2)
	assert.Equal(t, "gpt-4", modelList.Models[0].ID)
	// Canonical → Anthropic encode.
	anthropicBody, err := anthropicAdapter.EncodeModelsResponse(modelList)
	require.NoError(t, err)
	var anthropicResp map[string]any
	json.Unmarshal(anthropicBody, &anthropicResp)
	data := anthropicResp["data"].([]any)
	assert.Len(t, data, 2)
	first := data[0].(map[string]any)
	assert.Equal(t, "gpt-4", first["id"])
	assert.Equal(t, "model", first["type"])
	// Reverse direction: Anthropic decode → canonical → OpenAI encode.
	anthropicModelsBody := []byte(`{"data":[{"id":"claude-3-opus","type":"model","display_name":"Claude 3 Opus","created_at":"2025-01-01T00:00:00Z"}],"has_more":false}`)
	modelList2, err := anthropicAdapter.DecodeModelsResponse(anthropicModelsBody)
	require.NoError(t, err)
	assert.Len(t, modelList2.Models, 1)
	assert.Equal(t, "Claude 3 Opus", modelList2.Models[0].Name)
	openaiBody, err := openaiAdapter.EncodeModelsResponse(modelList2)
	require.NoError(t, err)
	var openaiResp map[string]any
	// Fix: removed a stray duplicated call that unmarshaled into &err,
	// clobbering the error variable with a decode into the wrong target.
	json.Unmarshal(openaiBody, &openaiResp)
	oaiData := openaiResp["data"].([]any)
	assert.Len(t, oaiData, 1)
	firstOai := oaiData[0].(map[string]any)
	assert.Equal(t, "claude-3-opus", firstOai["id"])
}
// ============ 错误响应测试 ============
// TestConversion_ErrorResponse_Format verifies that requesting an unknown
// model yields an error status (>= 400) on the OpenAI-protocol route.
func TestConversion_ErrorResponse_Format(t *testing.T) {
	router, _, _ := setupConversionTest(t)
	// Request a model that was never registered.
	payload, _ := json.Marshal(map[string]any{
		"model":    "nonexistent",
		"messages": []map[string]any{{"role": "user", "content": "test"}},
	})
	rec := httptest.NewRecorder()
	req := httptest.NewRequest("POST", "/openai/v1/chat/completions", bytes.NewReader(payload))
	req.Header.Set("Content-Type", "application/json")
	router.ServeHTTP(rec, req)
	assert.True(t, rec.Code >= 400)
}
// ============ 旧路由返回 404 ============
// TestConversion_OldRoutes_Return404 verifies that the legacy un-prefixed
// routes no longer match any handler: Gin falls through to 404.
func TestConversion_OldRoutes_Return404(t *testing.T) {
	router, _, _ := setupConversionTest(t)
	for _, legacyPath := range []string{"/v1/chat/completions", "/v1/messages"} {
		rec := httptest.NewRecorder()
		req := httptest.NewRequest("POST", legacyPath, strings.NewReader(`{"model":"test"}`))
		req.Header.Set("Content-Type", "application/json")
		router.ServeHTTP(rec, req)
		assert.Equal(t, 404, rec.Code)
	}
}
// ============ Provider Protocol 字段测试 ============
// TestConversion_ProviderWithProtocol verifies that a provider created with an
// explicit protocol field round-trips: the create response masks the API key,
// and a subsequent GET returns the stored protocol.
func TestConversion_ProviderWithProtocol(t *testing.T) {
	router, _, _ := setupConversionTest(t)
	payload, _ := json.Marshal(map[string]any{
		"id":       "test-protocol",
		"name":     "Test Protocol",
		"api_key":  "sk-test",
		"base_url": "https://test.com",
		"protocol": "anthropic",
	})
	rec := httptest.NewRecorder()
	req := httptest.NewRequest("POST", "/api/providers", bytes.NewReader(payload))
	req.Header.Set("Content-Type", "application/json")
	router.ServeHTTP(rec, req)
	require.Equal(t, 201, rec.Code)
	var created map[string]any
	json.Unmarshal(rec.Body.Bytes(), &created)
	// The API key must come back masked.
	assert.Contains(t, created["api_key"], "***")
	// GET must include the protocol field.
	rec = httptest.NewRecorder()
	req = httptest.NewRequest("GET", "/api/providers/test-protocol", nil)
	router.ServeHTTP(rec, req)
	assert.Equal(t, 200, rec.Code)
	var fetched map[string]any
	json.Unmarshal(rec.Body.Bytes(), &fetched)
	assert.Equal(t, "anthropic", fetched["protocol"])
}
// TestConversion_ProviderDefaultProtocol verifies that a provider created
// without a protocol field defaults to "openai" in the create response.
func TestConversion_ProviderDefaultProtocol(t *testing.T) {
	router, _, _ := setupConversionTest(t)
	payload, _ := json.Marshal(map[string]any{
		"id":       "default-proto",
		"name":     "Default",
		"api_key":  "sk-test",
		"base_url": "https://test.com",
	})
	rec := httptest.NewRecorder()
	req := httptest.NewRequest("POST", "/api/providers", bytes.NewReader(payload))
	req.Header.Set("Content-Type", "application/json")
	router.ServeHTTP(rec, req)
	require.Equal(t, 201, rec.Code)
	var created map[string]any
	json.Unmarshal(rec.Body.Bytes(), &created)
	assert.Equal(t, "openai", created["protocol"])
}
// Keep fmt referenced so the "fmt" import stays valid regardless of which
// tests happen to format strings with it. The former strings/time suppressors
// were removed as redundant: both packages are used directly by tests in this
// file (strings.NewReader, time.Now).
var _ = fmt.Sprintf