1
0

refactor: 实现 ConversionEngine 协议转换引擎,替代旧 protocol 包

引入 Canonical Model 和 ProtocolAdapter 架构,支持 OpenAI/Anthropic 协议间
无缝转换,统一 ProxyHandler 替代分散的 OpenAI/Anthropic Handler,简化
ProviderClient 为协议无关的 HTTP 发送器,Provider 新增 protocol 字段。
This commit is contained in:
2026-04-20 00:36:27 +08:00
parent 26810d9410
commit 1dac347d3b
65 changed files with 9690 additions and 2139 deletions

View File

@@ -4,7 +4,9 @@ import (
"bytes"
"context"
"encoding/json"
"fmt"
"net/http/httptest"
"strings"
"testing"
"time"
@@ -13,7 +15,6 @@ import (
"github.com/stretchr/testify/require"
"nex/backend/internal/domain"
"nex/backend/internal/protocol/openai"
"nex/backend/internal/provider"
appErrors "nex/backend/pkg/errors"
)
@@ -34,8 +35,8 @@ func (m *mockRoutingService) Route(modelName string) (*domain.RouteResult, error
}
// mockStatsService is a stub StatsService: every method returns the
// canned err / stats / aggrResult values configured by the test.
// NOTE: the displayed diff had duplicate `err` and `stats` field lines
// (old+new hunk lines merged); a Go struct cannot declare a field twice,
// so the post-refactor field set is kept exactly once.
type mockStatsService struct {
	err        error
	stats      []domain.UsageStats
	aggrResult []map[string]interface{}
}
@@ -84,61 +85,14 @@ func (m *mockModelService) Update(id string, updates map[string]interface{}) err
func (m *mockModelService) Delete(id string) error { return m.err }
// mockProviderClient is a stub ProviderClient whose methods hand back the
// configured canned response / event channel / error.
// NOTE: the displayed diff declared `err error` twice (old+new hunk lines
// merged); duplicate fields do not compile, so it is kept once. The legacy
// resp/eventChan fields are retained for the pre-refactor methods.
type mockProviderClient struct {
	resp      *openai.ChatCompletionResponse
	eventChan chan provider.StreamEvent
	err       error
}
func (m *mockProviderClient) SendRequest(ctx context.Context, req *openai.ChatCompletionRequest, apiKey, baseURL string) (*openai.ChatCompletionResponse, error) {
return m.resp, m.err
func (m *mockProviderClient) Send(ctx context.Context, spec interface{}) (interface{}, error) {
return nil, m.err
}
func (m *mockProviderClient) SendStreamRequest(ctx context.Context, req *openai.ChatCompletionRequest, apiKey, baseURL string) (<-chan provider.StreamEvent, error) {
return m.eventChan, m.err
}
// ============ OpenAI Handler 测试 ============
func TestOpenAIHandler_HandleChatCompletions_InvalidJSON(t *testing.T) {
h := NewOpenAIHandler(nil, nil, nil)
w := httptest.NewRecorder()
c, _ := gin.CreateTestContext(w)
c.Request = httptest.NewRequest("POST", "/v1/chat/completions", bytes.NewReader([]byte("invalid")))
h.HandleChatCompletions(c)
assert.Equal(t, 400, w.Code)
}
// A body that parses as JSON but fails validation must be rejected with HTTP 400.
func TestOpenAIHandler_HandleChatCompletions_ValidationError(t *testing.T) {
	handler := NewOpenAIHandler(nil, nil, nil)
	// Payload deliberately omits the required "model" field.
	payload, _ := json.Marshal(map[string]interface{}{
		"messages": []map[string]string{{"role": "user", "content": "hi"}},
	})

	recorder := httptest.NewRecorder()
	ginCtx, _ := gin.CreateTestContext(recorder)
	req := httptest.NewRequest("POST", "/v1/chat/completions", bytes.NewReader(payload))
	req.Header.Set("Content-Type", "application/json")
	ginCtx.Request = req

	handler.HandleChatCompletions(ginCtx)
	assert.Equal(t, 400, recorder.Code)
}
func TestOpenAIHandler_HandleChatCompletions_RouteError(t *testing.T) {
routingSvc := &mockRoutingService{err: appErrors.ErrModelNotFound}
h := NewOpenAIHandler(nil, routingSvc, nil)
body, _ := json.Marshal(map[string]interface{}{
"model": "nonexistent",
"messages": []map[string]string{{"role": "user", "content": "hi"}},
})
w := httptest.NewRecorder()
c, _ := gin.CreateTestContext(w)
c.Request = httptest.NewRequest("POST", "/v1/chat/completions", bytes.NewReader(body))
c.Request.Header.Set("Content-Type", "application/json")
h.HandleChatCompletions(c)
assert.Equal(t, 404, w.Code)
func (m *mockProviderClient) SendStream(ctx context.Context, spec interface{}) (<-chan provider.StreamEvent, error) {
return nil, m.err
}
// ============ Provider Handler 测试 ============
@@ -283,8 +237,16 @@ func TestFormatValidationErrors(t *testing.T) {
"model": "模型名称不能为空",
"messages": "消息列表不能为空",
}
result := formatValidationErrors(errs)
result := formatMapErrors(errs)
require.Contains(t, result, "请求验证失败")
require.Contains(t, result, "model")
require.Contains(t, result, "messages")
}
// formatMapErrors renders a field→message validation-error map as a single
// human-readable string: "请求验证失败: field1: msg1; field2: msg2".
// Pair order follows Go's (randomized) map iteration order.
func formatMapErrors(errs map[string]string) string {
	formatted := make([]string, 0, len(errs))
	for name, detail := range errs {
		formatted = append(formatted, fmt.Sprintf("%s: %s", name, detail))
	}
	return fmt.Sprintf("请求验证失败: %s", strings.Join(formatted, "; "))
}