refactor: 后端代码质量优化 - 复用公共库、使用标准库、类型安全错误判断
## 高优先级修复

- stats_service_impl: 使用 strings.SplitN 替代错误的索引分割
- provider_handler: 使用 errors.Is(err, gorm.ErrDuplicatedKey) 替代字符串匹配
- client: 重写 isNetworkError 使用 errors.As/Is 类型安全判断
- proxy_handler: 使用 encoding/json 标准库解析 JSON(extractModelName、isStreamRequest)

## 中优先级修复

- stats_handler: 添加 parseDateParam 辅助函数消除重复日期解析
- pkg/errors: 新增 ErrRequestCreate/Send/ResponseRead 错误类型和 WithCause 方法
- client: 使用结构化错误替代 fmt.Errorf
- ConversionEngine: logger 依赖注入,替换所有 zap.L() 调用

## 低优先级修复

- encoder: 删除 joinStrings,使用 strings.Join
- adapter: 删除 modelInfoRegex 正则,使用 isModelInfoPath 字符串函数

## 文档更新

- README.md: 添加公共库使用指南和编码规范章节
- specs: 同步 delta specs 到 main specs(error-handling、structured-logging、request-validation)

## 归档

- openspec/changes/archive/2026-04-20-refactor-backend-code-quality/
This commit is contained in:
@@ -84,8 +84,9 @@ func (m *mockProxyStatsService) Aggregate(stats []domain.UsageStats, groupBy str
|
||||
func setupProxyEngine(t *testing.T) *conversion.ConversionEngine {
|
||||
t.Helper()
|
||||
registry := conversion.NewMemoryRegistry()
|
||||
engine := conversion.NewConversionEngine(registry)
|
||||
engine := conversion.NewConversionEngine(registry, nil)
|
||||
require.NoError(t, registry.Register(openai.NewAdapter()))
|
||||
require.NoError(t, registry.Register(anthropic.NewAdapter()))
|
||||
return engine
|
||||
}
|
||||
|
||||
@@ -321,27 +322,6 @@ func TestProxyHandler_ForwardPassthrough_NoProviders(t *testing.T) {
|
||||
assert.Equal(t, 404, w.Code)
|
||||
}
|
||||
|
||||
func TestExtractModelName(t *testing.T) {
|
||||
tests := []struct {
|
||||
name string
|
||||
body string
|
||||
want string
|
||||
}{
|
||||
{"basic", `{"model":"gpt-4","messages":[]}`, "gpt-4"},
|
||||
{"nested", `{"stream":true,"model":"claude-3","messages":[]}`, "claude-3"},
|
||||
{"no_model", `{"messages":[]}`, ""},
|
||||
{"empty", "", ""},
|
||||
{"escaped", `{"model":"gpt\"4","messages":[]}`, `gpt\"4`},
|
||||
}
|
||||
|
||||
for _, tt := range tests {
|
||||
t.Run(tt.name, func(t *testing.T) {
|
||||
got := extractModelName([]byte(tt.body))
|
||||
assert.Equal(t, tt.want, got)
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
func TestExtractHeaders(t *testing.T) {
|
||||
w := httptest.NewRecorder()
|
||||
c, _ := gin.CreateTestContext(w)
|
||||
@@ -354,30 +334,6 @@ func TestExtractHeaders(t *testing.T) {
|
||||
assert.Equal(t, "application/json", headers["Content-Type"])
|
||||
}
|
||||
|
||||
func TestIsStreamRequest(t *testing.T) {
|
||||
engine := setupProxyEngine(t)
|
||||
h := newTestProxyHandler(engine, &mockProxyProviderClient{}, &mockProxyRoutingService{}, &mockProxyProviderService{})
|
||||
|
||||
tests := []struct {
|
||||
name string
|
||||
body string
|
||||
path string
|
||||
expected bool
|
||||
}{
|
||||
{"stream true chat", `{"model":"gpt-4","stream":true}`, "/v1/chat/completions", true},
|
||||
{"stream false chat", `{"model":"gpt-4","stream":false}`, "/v1/chat/completions", false},
|
||||
{"no stream field", `{"model":"gpt-4"}`, "/v1/chat/completions", false},
|
||||
{"stream true non-chat", `{"model":"gpt-4","stream":true}`, "/v1/models", false},
|
||||
}
|
||||
|
||||
for _, tt := range tests {
|
||||
t.Run(tt.name, func(t *testing.T) {
|
||||
result := h.isStreamRequest([]byte(tt.body), "openai", tt.path)
|
||||
assert.Equal(t, tt.expected, result)
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
func TestProxyHandler_HandleProxy_ProviderProtocolDefault(t *testing.T) {
|
||||
engine := setupProxyEngine(t)
|
||||
routingSvc := &mockProxyRoutingService{
|
||||
@@ -529,7 +485,7 @@ func TestProxyHandler_HandleStream_FlushOutput(t *testing.T) {
|
||||
|
||||
func TestProxyHandler_HandleStream_CreateStreamConverterError(t *testing.T) {
|
||||
registry := conversion.NewMemoryRegistry()
|
||||
engine := conversion.NewConversionEngine(registry)
|
||||
engine := conversion.NewConversionEngine(registry, nil)
|
||||
err := registry.Register(openai.NewAdapter())
|
||||
require.NoError(t, err)
|
||||
|
||||
@@ -552,7 +508,7 @@ func TestProxyHandler_HandleStream_CreateStreamConverterError(t *testing.T) {
|
||||
|
||||
func TestProxyHandler_HandleStream_ConvertRequestError(t *testing.T) {
|
||||
registry := conversion.NewMemoryRegistry()
|
||||
engine := conversion.NewConversionEngine(registry)
|
||||
engine := conversion.NewConversionEngine(registry, nil)
|
||||
require.NoError(t, registry.Register(openai.NewAdapter()))
|
||||
|
||||
routingSvc := &mockProxyRoutingService{
|
||||
@@ -574,7 +530,7 @@ func TestProxyHandler_HandleStream_ConvertRequestError(t *testing.T) {
|
||||
|
||||
func TestProxyHandler_HandleNonStream_ConvertResponseError(t *testing.T) {
|
||||
registry := conversion.NewMemoryRegistry()
|
||||
engine := conversion.NewConversionEngine(registry)
|
||||
engine := conversion.NewConversionEngine(registry, nil)
|
||||
require.NoError(t, registry.Register(openai.NewAdapter()))
|
||||
require.NoError(t, registry.Register(anthropic.NewAdapter()))
|
||||
|
||||
@@ -636,7 +592,7 @@ func TestProxyHandler_HandleNonStream_ResponseHeaders(t *testing.T) {
|
||||
|
||||
func TestProxyHandler_ForwardPassthrough_CrossProtocol(t *testing.T) {
|
||||
registry := conversion.NewMemoryRegistry()
|
||||
engine := conversion.NewConversionEngine(registry)
|
||||
engine := conversion.NewConversionEngine(registry, nil)
|
||||
require.NoError(t, registry.Register(openai.NewAdapter()))
|
||||
|
||||
anthropicAdapter := anthropic.NewAdapter()
|
||||
@@ -759,3 +715,119 @@ func TestProxyHandler_HandleProxy_RouteEmptyBody_NoModel(t *testing.T) {
|
||||
h.HandleProxy(c)
|
||||
assert.Equal(t, 200, w.Code)
|
||||
}
|
||||
|
||||
// ============ extractModelName 测试 ============
|
||||
|
||||
func TestExtractModelName(t *testing.T) {
|
||||
tests := []struct {
|
||||
name string
|
||||
body []byte
|
||||
expected string
|
||||
}{
|
||||
{
|
||||
name: "valid model",
|
||||
body: []byte(`{"model": "gpt-4", "messages": []}`),
|
||||
expected: "gpt-4",
|
||||
},
|
||||
{
|
||||
name: "empty body",
|
||||
body: []byte(`{}`),
|
||||
expected: "",
|
||||
},
|
||||
{
|
||||
name: "invalid json",
|
||||
body: []byte(`{invalid}`),
|
||||
expected: "",
|
||||
},
|
||||
{
|
||||
name: "nested structure",
|
||||
body: []byte(`{"model": "claude-3", "messages": [{"role": "user", "content": "hello"}]}`),
|
||||
expected: "claude-3",
|
||||
},
|
||||
{
|
||||
name: "model with special chars",
|
||||
body: []byte(`{"model": "gpt-4-0125-preview", "stream": true}`),
|
||||
expected: "gpt-4-0125-preview",
|
||||
},
|
||||
{
|
||||
name: "empty body bytes",
|
||||
body: []byte{},
|
||||
expected: "",
|
||||
},
|
||||
{
|
||||
name: "model is null",
|
||||
body: []byte(`{"model": null}`),
|
||||
expected: "",
|
||||
},
|
||||
}
|
||||
for _, tt := range tests {
|
||||
t.Run(tt.name, func(t *testing.T) {
|
||||
result := extractModelName(tt.body)
|
||||
assert.Equal(t, tt.expected, result)
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
// ============ isStreamRequest 测试 ============
|
||||
|
||||
func TestIsStreamRequest(t *testing.T) {
|
||||
engine := setupProxyEngine(t)
|
||||
h := &ProxyHandler{engine: engine}
|
||||
|
||||
tests := []struct {
|
||||
name string
|
||||
body []byte
|
||||
clientProtocol string
|
||||
nativePath string
|
||||
expected bool
|
||||
}{
|
||||
{
|
||||
name: "stream true",
|
||||
body: []byte(`{"model": "gpt-4", "stream": true}`),
|
||||
clientProtocol: "openai",
|
||||
nativePath: "/v1/chat/completions",
|
||||
expected: true,
|
||||
},
|
||||
{
|
||||
name: "stream false",
|
||||
body: []byte(`{"model": "gpt-4", "stream": false}`),
|
||||
clientProtocol: "openai",
|
||||
nativePath: "/v1/chat/completions",
|
||||
expected: false,
|
||||
},
|
||||
{
|
||||
name: "no stream field",
|
||||
body: []byte(`{"model": "gpt-4"}`),
|
||||
clientProtocol: "openai",
|
||||
nativePath: "/v1/chat/completions",
|
||||
expected: false,
|
||||
},
|
||||
{
|
||||
name: "invalid json",
|
||||
body: []byte(`{invalid}`),
|
||||
clientProtocol: "openai",
|
||||
nativePath: "/v1/chat/completions",
|
||||
expected: false,
|
||||
},
|
||||
{
|
||||
name: "not chat endpoint",
|
||||
body: []byte(`{"model": "gpt-4", "stream": true}`),
|
||||
clientProtocol: "openai",
|
||||
nativePath: "/v1/models",
|
||||
expected: false,
|
||||
},
|
||||
{
|
||||
name: "anthropic stream",
|
||||
body: []byte(`{"model": "claude-3", "stream": true}`),
|
||||
clientProtocol: "anthropic",
|
||||
nativePath: "/v1/messages",
|
||||
expected: true,
|
||||
},
|
||||
}
|
||||
for _, tt := range tests {
|
||||
t.Run(tt.name, func(t *testing.T) {
|
||||
result := h.isStreamRequest(tt.body, tt.clientProtocol, tt.nativePath)
|
||||
assert.Equal(t, tt.expected, result)
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
Reference in New Issue
Block a user