1
0

refactor: 实现 ConversionEngine 协议转换引擎,替代旧 protocol 包

引入 Canonical Model 和 ProtocolAdapter 架构,支持 OpenAI/Anthropic 协议间
无缝转换,统一 ProxyHandler 替代分散的 OpenAI/Anthropic Handler,简化
ProviderClient 为协议无关的 HTTP 发送器,Provider 新增 protocol 字段。
This commit is contained in:
2026-04-20 00:36:27 +08:00
parent 26810d9410
commit 1dac347d3b
65 changed files with 9690 additions and 2139 deletions

View File

@@ -2,7 +2,6 @@ package provider
import (
"context"
"encoding/json"
"fmt"
"net/http"
"net/http/httptest"
@@ -11,14 +10,13 @@ import (
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
"nex/backend/internal/protocol/openai"
"nex/backend/internal/conversion"
)
func TestNewClient(t *testing.T) {
client := NewClient()
require.NotNil(t, client)
assert.NotNil(t, client.httpClient)
assert.NotNil(t, client.adapter)
assert.Equal(t, 4096, client.streamCfg.InitialBufferSize)
assert.Equal(t, 65536, client.streamCfg.MaxBufferSize)
assert.Equal(t, 100, client.streamCfg.ChannelBufferSize)
@@ -31,67 +29,66 @@ func TestDefaultStreamConfig(t *testing.T) {
assert.Equal(t, 100, cfg.ChannelBufferSize)
}
func TestClient_SendRequest_Success(t *testing.T) {
func TestClient_Send_Success(t *testing.T) {
server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
assert.Equal(t, "POST", r.Method)
assert.Equal(t, "application/json", r.Header.Get("Content-Type"))
assert.Equal(t, "Bearer test-key", r.Header.Get("Authorization"))
resp := openai.ChatCompletionResponse{
ID: "chatcmpl-123",
Choices: []openai.Choice{
{Index: 0, Message: &openai.Message{Role: "assistant", Content: "Hello!"}},
},
}
w.Header().Set("Content-Type", "application/json")
json.NewEncoder(w).Encode(resp)
w.WriteHeader(http.StatusOK)
w.Write([]byte(`{"id":"test","model":"gpt-4"}`))
}))
defer server.Close()
client := NewClient()
req := &openai.ChatCompletionRequest{
Model: "gpt-4",
Messages: []openai.Message{{Role: "user", Content: "Hi"}},
spec := conversion.HTTPRequestSpec{
URL: server.URL + "/v1/chat/completions",
Method: "POST",
Headers: map[string]string{
"Authorization": "Bearer test-key",
"Content-Type": "application/json",
},
Body: []byte(`{"model":"gpt-4","messages":[]}`),
}
result, err := client.SendRequest(context.Background(), req, "test-key", server.URL)
result, err := client.Send(context.Background(), spec)
require.NoError(t, err)
assert.Equal(t, "chatcmpl-123", result.ID)
assert.Equal(t, 200, result.StatusCode)
assert.Contains(t, string(result.Body), "test")
}
func TestClient_SendRequest_ErrorResponse(t *testing.T) {
func TestClient_Send_ErrorResponse(t *testing.T) {
server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
w.WriteHeader(http.StatusUnauthorized)
json.NewEncoder(w).Encode(openai.ErrorResponse{
Error: openai.ErrorDetail{Message: "Invalid API key"},
})
w.Write([]byte(`{"error":{"message":"Invalid API key"}}`))
}))
defer server.Close()
client := NewClient()
req := &openai.ChatCompletionRequest{
Model: "gpt-4",
Messages: []openai.Message{{Role: "user", Content: "Hi"}},
spec := conversion.HTTPRequestSpec{
URL: server.URL + "/v1/chat/completions",
Method: "POST",
Headers: map[string]string{"Authorization": "Bearer bad-key"},
Body: []byte(`{}`),
}
_, err := client.SendRequest(context.Background(), req, "bad-key", server.URL)
assert.Error(t, err)
assert.Contains(t, err.Error(), "Invalid API key")
result, err := client.Send(context.Background(), spec)
require.NoError(t, err)
assert.Equal(t, 401, result.StatusCode)
}
func TestClient_SendRequest_ConnectionError(t *testing.T) {
func TestClient_Send_ConnectionError(t *testing.T) {
client := NewClient()
req := &openai.ChatCompletionRequest{
Model: "gpt-4",
Messages: []openai.Message{{Role: "user", Content: "Hi"}},
spec := conversion.HTTPRequestSpec{
URL: "http://localhost:1/v1/chat/completions",
Method: "POST",
}
_, err := client.SendRequest(context.Background(), req, "key", "http://localhost:1")
_, err := client.Send(context.Background(), spec)
assert.Error(t, err)
}
func TestClient_SendStreamRequest_CreatesChannel(t *testing.T) {
// 使用一个慢服务器确保客户端有时间读取
func TestClient_SendStream_CreatesChannel(t *testing.T) {
server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
w.Header().Set("Content-Type", "text/event-stream")
w.WriteHeader(http.StatusOK)
@@ -99,35 +96,36 @@ func TestClient_SendStreamRequest_CreatesChannel(t *testing.T) {
defer server.Close()
client := NewClient()
req := &openai.ChatCompletionRequest{
Model: "gpt-4",
Messages: []openai.Message{{Role: "user", Content: "Hi"}},
spec := conversion.HTTPRequestSpec{
URL: server.URL + "/v1/chat/completions",
Method: "POST",
Headers: map[string]string{"Authorization": "Bearer test-key"},
Body: []byte(`{}`),
}
eventChan, err := client.SendStreamRequest(context.Background(), req, "test-key", server.URL)
eventChan, err := client.SendStream(context.Background(), spec)
require.NoError(t, err)
require.NotNil(t, eventChan)
// 读取直到 channel 关闭(服务器关闭后应产生 EOF)
for range eventChan {
// 消费所有事件
}
// channel 应已关闭(不阻塞即通过)
}
func TestClient_SendStreamRequest_ErrorResponse(t *testing.T) {
func TestClient_SendStream_ErrorResponse(t *testing.T) {
server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
w.WriteHeader(http.StatusInternalServerError)
}))
defer server.Close()
client := NewClient()
req := &openai.ChatCompletionRequest{
Model: "gpt-4",
Messages: []openai.Message{{Role: "user", Content: "Hi"}},
spec := conversion.HTTPRequestSpec{
URL: server.URL + "/v1/chat/completions",
Method: "POST",
Headers: map[string]string{"Authorization": "Bearer key"},
Body: []byte(`{}`),
}
_, err := client.SendStreamRequest(context.Background(), req, "key", server.URL)
_, err := client.SendStream(context.Background(), spec)
assert.Error(t, err)
}
@@ -145,7 +143,7 @@ func TestIsNetworkError(t *testing.T) {
{"", false},
}
for _, tt := range tests {
err := fmt.Errorf("%s", tt.input) //nolint:govet
err := fmt.Errorf("%s", tt.input)
assert.Equal(t, tt.want, isNetworkError(err), "isNetworkError(%q)", tt.input)
}
}