// Proxy handler tests. (Header reconstructed from the export's commit note.)
//
// Implements the unified model ID format (provider_id/model_name), supporting
// cross-protocol model identification and Smart Passthrough. Core changes:
//   - New pkg/modelid package: parse, format, and validate unified model IDs
//   - DB migration: models table uses a UUID primary key + UNIQUE(provider_id, model_name)
//   - Repository layer: FindByProviderAndModelName, ListEnabled
//   - Service layer: compound-uniqueness validation, provider ID charset validation
//   - Conversion layer: ExtractModelName, RewriteRequestModelName/RewriteResponseModelName
//   - Handler layer: unified model ID routing, Smart Passthrough, local Models API aggregation
//   - New error-responses and unified-model-id specs
//
// Original export metadata: 1060 lines, 43 KiB, Go.
package handler
import (
	"bytes"
	"context"
	"encoding/json"
	"fmt"
	"net/http/httptest"
	"testing"
	"time"

	"github.com/gin-gonic/gin"
	"github.com/stretchr/testify/assert"
	"github.com/stretchr/testify/require"

	"nex/backend/internal/conversion"
	"nex/backend/internal/conversion/anthropic"
	"nex/backend/internal/conversion/openai"
	"nex/backend/internal/domain"
	"nex/backend/internal/provider"
	appErrors "nex/backend/pkg/errors"
)
// init switches Gin into test mode so the handlers under test do not emit
// debug logging during the test run.
func init() {
	gin.SetMode(gin.TestMode)
}
// mockProxyProviderClient is a test double for the upstream provider client.
// Each hook, when non-nil, fully overrides the corresponding method; when nil,
// the method falls back to a canned successful OpenAI-style response.
type mockProxyProviderClient struct {
	sendFn       func(ctx context.Context, spec conversion.HTTPRequestSpec) (*conversion.HTTPResponseSpec, error)
	sendStreamFn func(ctx context.Context, spec conversion.HTTPRequestSpec) (<-chan provider.StreamEvent, error)
}
func (m *mockProxyProviderClient) Send(ctx context.Context, spec conversion.HTTPRequestSpec) (*conversion.HTTPResponseSpec, error) {
|
||
if m.sendFn != nil {
|
||
return m.sendFn(ctx, spec)
|
||
}
|
||
return &conversion.HTTPResponseSpec{
|
||
StatusCode: 200,
|
||
Headers: map[string]string{"Content-Type": "application/json"},
|
||
Body: []byte(`{"id":"resp-1","model":"gpt-4","choices":[{"index":0,"message":{"role":"assistant","content":"hi"},"finish_reason":"stop"}],"usage":{"prompt_tokens":1,"completion_tokens":1,"total_tokens":2}}`),
|
||
}, nil
|
||
}
|
||
|
||
func (m *mockProxyProviderClient) SendStream(ctx context.Context, spec conversion.HTTPRequestSpec) (<-chan provider.StreamEvent, error) {
|
||
if m.sendStreamFn != nil {
|
||
return m.sendStreamFn(ctx, spec)
|
||
}
|
||
ch := make(chan provider.StreamEvent, 10)
|
||
go func() {
|
||
defer close(ch)
|
||
ch <- provider.StreamEvent{Data: []byte("data: {\"id\":\"1\",\"choices\":[{\"delta\":{\"content\":\"hi\"}}]}\n\n")}
|
||
ch <- provider.StreamEvent{Data: []byte("data: [DONE]\n\n")}
|
||
ch <- provider.StreamEvent{Done: true}
|
||
}()
|
||
return ch, nil
|
||
}
|
||
|
||
// mockProxyRoutingService is a test double for the routing service; it always
// answers with the preconfigured result/err pair regardless of arguments.
type mockProxyRoutingService struct {
	result *domain.RouteResult
	err    error
}
func (m *mockProxyRoutingService) RouteByModelName(providerID, modelName string) (*domain.RouteResult, error) {
|
||
return m.result, m.err
|
||
}
|
||
|
||
// mockProxyProviderService is a test double for the provider service.
// providers/err feed List; enabledModels feeds ListEnabledModels;
// modelByProvName feeds GetModelByProviderAndName.
type mockProxyProviderService struct {
	providers       []domain.Provider
	err             error
	enabledModels   []domain.Model
	modelByProvName *domain.Model
}
// ListEnabledModels returns the preconfigured enabled-model list.
// NOTE(review): m.err is not surfaced here (only List returns it) — confirm
// that no test needs this method to fail.
func (m *mockProxyProviderService) ListEnabledModels() ([]domain.Model, error) {
	return m.enabledModels, nil
}
// GetModelByProviderAndName ignores its arguments and returns the canned
// model. NOTE(review): like ListEnabledModels, it never returns m.err.
func (m *mockProxyProviderService) GetModelByProviderAndName(providerID, modelName string) (*domain.Model, error) {
	return m.modelByProvName, nil
}
// Remaining ProviderService methods: only List is configurable (providers/err);
// the rest are no-ops present solely to satisfy the interface.
func (m *mockProxyProviderService) Create(p *domain.Provider) error { return nil }

func (m *mockProxyProviderService) Get(id string, maskKey bool) (*domain.Provider, error) { return nil, nil }

func (m *mockProxyProviderService) List() ([]domain.Provider, error) { return m.providers, m.err }

func (m *mockProxyProviderService) Update(id string, updates map[string]interface{}) error { return nil }

func (m *mockProxyProviderService) Delete(id string) error { return nil }
// mockProxyStatsService is a no-op stats service: Record always succeeds and
// the query/aggregation methods return nothing.
type mockProxyStatsService struct{}

func (m *mockProxyStatsService) Record(providerID, modelName string) error { return nil }

func (m *mockProxyStatsService) Get(providerID, modelName string, startDate, endDate *time.Time) ([]domain.UsageStats, error) { return nil, nil }

func (m *mockProxyStatsService) Aggregate(stats []domain.UsageStats, groupBy string) []map[string]interface{} { return nil }
func setupProxyEngine(t *testing.T) *conversion.ConversionEngine {
|
||
t.Helper()
|
||
registry := conversion.NewMemoryRegistry()
|
||
engine := conversion.NewConversionEngine(registry, nil)
|
||
require.NoError(t, registry.Register(openai.NewAdapter()))
|
||
require.NoError(t, registry.Register(anthropic.NewAdapter()))
|
||
return engine
|
||
}
|
||
|
||
func newTestProxyHandler(engine *conversion.ConversionEngine, client *mockProxyProviderClient, routingSvc *mockProxyRoutingService, providerSvc *mockProxyProviderService) *ProxyHandler {
|
||
return NewProxyHandler(
|
||
engine,
|
||
client,
|
||
routingSvc,
|
||
providerSvc,
|
||
&mockProxyStatsService{},
|
||
)
|
||
}
|
||
|
||
func TestProxyHandler_HandleProxy_MissingProtocol(t *testing.T) {
|
||
engine := setupProxyEngine(t)
|
||
h := newTestProxyHandler(engine, &mockProxyProviderClient{}, &mockProxyRoutingService{}, &mockProxyProviderService{})
|
||
|
||
w := httptest.NewRecorder()
|
||
c, _ := gin.CreateTestContext(w)
|
||
c.Request = httptest.NewRequest("POST", "/", bytes.NewReader([]byte(`{}`)))
|
||
|
||
h.HandleProxy(c)
|
||
assert.Equal(t, 400, w.Code)
|
||
}
|
||
|
||
func TestProxyHandler_HandleProxy_NonStreamSuccess(t *testing.T) {
|
||
engine := setupProxyEngine(t)
|
||
routingSvc := &mockProxyRoutingService{
|
||
result: &domain.RouteResult{
|
||
Provider: &domain.Provider{ID: "p1", Name: "Test", APIKey: "sk-test", BaseURL: "https://api.test.com", Protocol: "openai", Enabled: true},
|
||
Model: &domain.Model{ID: "m1", ProviderID: "p1", ModelName: "gpt-4", Enabled: true},
|
||
},
|
||
}
|
||
client := &mockProxyProviderClient{
|
||
sendFn: func(ctx context.Context, spec conversion.HTTPRequestSpec) (*conversion.HTTPResponseSpec, error) {
|
||
return &conversion.HTTPResponseSpec{
|
||
StatusCode: 200,
|
||
Headers: map[string]string{"Content-Type": "application/json"},
|
||
Body: []byte(`{"id":"resp-1","object":"chat.completion","model":"gpt-4","choices":[{"index":0,"message":{"role":"assistant","content":"Hello"},"finish_reason":"stop"}],"usage":{"prompt_tokens":5,"completion_tokens":3,"total_tokens":8}}`),
|
||
}, nil
|
||
},
|
||
}
|
||
h := newTestProxyHandler(engine, client, routingSvc, &mockProxyProviderService{})
|
||
|
||
w := httptest.NewRecorder()
|
||
c, _ := gin.CreateTestContext(w)
|
||
c.Params = gin.Params{{Key: "protocol", Value: "openai"}, {Key: "path", Value: "/chat/completions"}}
|
||
c.Request = httptest.NewRequest("POST", "/openai/v1/chat/completions", bytes.NewReader([]byte(`{"model":"gpt-4","messages":[{"role":"user","content":"hi"}]}`)))
|
||
|
||
h.HandleProxy(c)
|
||
assert.Equal(t, 200, w.Code)
|
||
|
||
var resp map[string]any
|
||
require.NoError(t, json.Unmarshal(w.Body.Bytes(), &resp))
|
||
assert.Equal(t, "resp-1", resp["id"])
|
||
}
|
||
|
||
func TestProxyHandler_HandleProxy_RoutingError_WithBody(t *testing.T) {
|
||
engine := setupProxyEngine(t)
|
||
routingSvc := &mockProxyRoutingService{err: appErrors.ErrModelNotFound}
|
||
h := newTestProxyHandler(engine, &mockProxyProviderClient{}, routingSvc, &mockProxyProviderService{})
|
||
|
||
w := httptest.NewRecorder()
|
||
c, _ := gin.CreateTestContext(w)
|
||
c.Params = gin.Params{{Key: "protocol", Value: "openai"}, {Key: "path", Value: "/chat/completions"}}
|
||
c.Request = httptest.NewRequest("POST", "/openai/v1/chat/completions", bytes.NewReader([]byte(`{"model":"unknown","messages":[{"role":"user","content":"hi"}]}`)))
|
||
|
||
h.HandleProxy(c)
|
||
assert.Equal(t, 404, w.Code)
|
||
}
|
||
|
||
// TestProxyHandler_HandleProxy_ConversionError expects a 500 when the request
// cannot be completed.
//
// NOTE(review): despite its name, this test never exercises a conversion
// failure — the mock client returns context.DeadlineExceeded from Send, making
// it byte-identical to TestProxyHandler_HandleProxy_ClientSendError below.
// Either rewrite it to force an actual conversion error (e.g. a malformed
// upstream body, see TestProxyHandler_HandleNonStream_ConvertResponseError)
// or delete one of the two duplicates.
func TestProxyHandler_HandleProxy_ConversionError(t *testing.T) {
	engine := setupProxyEngine(t)
	routingSvc := &mockProxyRoutingService{
		result: &domain.RouteResult{
			Provider: &domain.Provider{ID: "p1", Name: "Test", APIKey: "sk-test", BaseURL: "https://api.test.com", Protocol: "openai", Enabled: true},
			Model:    &domain.Model{ID: "m1", ProviderID: "p1", ModelName: "gpt-4", Enabled: true},
		},
	}
	client := &mockProxyProviderClient{
		sendFn: func(ctx context.Context, spec conversion.HTTPRequestSpec) (*conversion.HTTPResponseSpec, error) {
			return nil, context.DeadlineExceeded
		},
	}
	h := newTestProxyHandler(engine, client, routingSvc, &mockProxyProviderService{})

	w := httptest.NewRecorder()
	c, _ := gin.CreateTestContext(w)
	c.Params = gin.Params{{Key: "protocol", Value: "openai"}, {Key: "path", Value: "/chat/completions"}}
	c.Request = httptest.NewRequest("POST", "/openai/v1/chat/completions", bytes.NewReader([]byte(`{"model":"gpt-4","messages":[{"role":"user","content":"hi"}]}`)))

	h.HandleProxy(c)
	assert.Equal(t, 500, w.Code)
}
func TestProxyHandler_HandleProxy_ClientSendError(t *testing.T) {
|
||
engine := setupProxyEngine(t)
|
||
routingSvc := &mockProxyRoutingService{
|
||
result: &domain.RouteResult{
|
||
Provider: &domain.Provider{ID: "p1", Name: "Test", APIKey: "sk-test", BaseURL: "https://api.test.com", Protocol: "openai", Enabled: true},
|
||
Model: &domain.Model{ID: "m1", ProviderID: "p1", ModelName: "gpt-4", Enabled: true},
|
||
},
|
||
}
|
||
client := &mockProxyProviderClient{
|
||
sendFn: func(ctx context.Context, spec conversion.HTTPRequestSpec) (*conversion.HTTPResponseSpec, error) {
|
||
return nil, context.DeadlineExceeded
|
||
},
|
||
}
|
||
h := newTestProxyHandler(engine, client, routingSvc, &mockProxyProviderService{})
|
||
|
||
w := httptest.NewRecorder()
|
||
c, _ := gin.CreateTestContext(w)
|
||
c.Params = gin.Params{{Key: "protocol", Value: "openai"}, {Key: "path", Value: "/chat/completions"}}
|
||
c.Request = httptest.NewRequest("POST", "/openai/v1/chat/completions", bytes.NewReader([]byte(`{"model":"gpt-4","messages":[{"role":"user","content":"hi"}]}`)))
|
||
|
||
h.HandleProxy(c)
|
||
assert.Equal(t, 500, w.Code)
|
||
}
|
||
|
||
func TestProxyHandler_HandleProxy_StreamSuccess(t *testing.T) {
|
||
engine := setupProxyEngine(t)
|
||
routingSvc := &mockProxyRoutingService{
|
||
result: &domain.RouteResult{
|
||
Provider: &domain.Provider{ID: "p1", Name: "Test", APIKey: "sk-test", BaseURL: "https://api.test.com", Protocol: "openai", Enabled: true},
|
||
Model: &domain.Model{ID: "m1", ProviderID: "p1", ModelName: "gpt-4", Enabled: true},
|
||
},
|
||
}
|
||
client := &mockProxyProviderClient{
|
||
sendStreamFn: func(ctx context.Context, spec conversion.HTTPRequestSpec) (<-chan provider.StreamEvent, error) {
|
||
ch := make(chan provider.StreamEvent, 10)
|
||
go func() {
|
||
defer close(ch)
|
||
ch <- provider.StreamEvent{Data: []byte("data: {\"id\":\"1\",\"object\":\"chat.completion.chunk\",\"choices\":[{\"index\":0,\"delta\":{\"role\":\"assistant\"}}]}\n\n")}
|
||
ch <- provider.StreamEvent{Data: []byte("data: {\"id\":\"1\",\"object\":\"chat.completion.chunk\",\"choices\":[{\"index\":0,\"delta\":{\"content\":\"Hello\"}}]}\n\n")}
|
||
ch <- provider.StreamEvent{Data: []byte("data: [DONE]\n\n")}
|
||
ch <- provider.StreamEvent{Done: true}
|
||
}()
|
||
return ch, nil
|
||
},
|
||
}
|
||
h := newTestProxyHandler(engine, client, routingSvc, &mockProxyProviderService{})
|
||
|
||
w := httptest.NewRecorder()
|
||
c, _ := gin.CreateTestContext(w)
|
||
c.Params = gin.Params{{Key: "protocol", Value: "openai"}, {Key: "path", Value: "/chat/completions"}}
|
||
c.Request = httptest.NewRequest("POST", "/openai/v1/chat/completions", bytes.NewReader([]byte(`{"model":"gpt-4","messages":[{"role":"user","content":"hi"}],"stream":true}`)))
|
||
|
||
h.HandleProxy(c)
|
||
assert.Equal(t, 200, w.Code)
|
||
assert.Equal(t, "text/event-stream", w.Header().Get("Content-Type"))
|
||
assert.Contains(t, w.Body.String(), "Hello")
|
||
}
|
||
|
||
func TestProxyHandler_HandleProxy_StreamError(t *testing.T) {
|
||
engine := setupProxyEngine(t)
|
||
routingSvc := &mockProxyRoutingService{
|
||
result: &domain.RouteResult{
|
||
Provider: &domain.Provider{ID: "p1", Name: "Test", APIKey: "sk-test", BaseURL: "https://api.test.com", Protocol: "openai", Enabled: true},
|
||
Model: &domain.Model{ID: "m1", ProviderID: "p1", ModelName: "gpt-4", Enabled: true},
|
||
},
|
||
}
|
||
client := &mockProxyProviderClient{
|
||
sendStreamFn: func(ctx context.Context, spec conversion.HTTPRequestSpec) (<-chan provider.StreamEvent, error) {
|
||
return nil, context.DeadlineExceeded
|
||
},
|
||
}
|
||
h := newTestProxyHandler(engine, client, routingSvc, &mockProxyProviderService{})
|
||
|
||
w := httptest.NewRecorder()
|
||
c, _ := gin.CreateTestContext(w)
|
||
c.Params = gin.Params{{Key: "protocol", Value: "openai"}, {Key: "path", Value: "/chat/completions"}}
|
||
c.Request = httptest.NewRequest("POST", "/openai/v1/chat/completions", bytes.NewReader([]byte(`{"model":"gpt-4","messages":[{"role":"user","content":"hi"}],"stream":true}`)))
|
||
|
||
h.HandleProxy(c)
|
||
assert.Equal(t, 500, w.Code)
|
||
}
|
||
|
||
func TestProxyHandler_ForwardPassthrough_GET(t *testing.T) {
|
||
engine := setupProxyEngine(t)
|
||
routingSvc := &mockProxyRoutingService{err: appErrors.ErrModelNotFound}
|
||
providerSvc := &mockProxyProviderService{
|
||
providers: []domain.Provider{
|
||
{ID: "p1", Name: "Test", APIKey: "sk-test", BaseURL: "https://api.test.com", Protocol: "openai"},
|
||
},
|
||
}
|
||
client := &mockProxyProviderClient{
|
||
sendFn: func(ctx context.Context, spec conversion.HTTPRequestSpec) (*conversion.HTTPResponseSpec, error) {
|
||
return &conversion.HTTPResponseSpec{
|
||
StatusCode: 200,
|
||
Headers: map[string]string{"Content-Type": "application/json"},
|
||
Body: []byte(`{"object":"list","data":[{"id":"gpt-4","object":"model"}]}`),
|
||
}, nil
|
||
},
|
||
}
|
||
h := newTestProxyHandler(engine, client, routingSvc, providerSvc)
|
||
|
||
w := httptest.NewRecorder()
|
||
c, _ := gin.CreateTestContext(w)
|
||
c.Params = gin.Params{{Key: "protocol", Value: "openai"}, {Key: "path", Value: "/models"}}
|
||
c.Request = httptest.NewRequest("GET", "/openai/v1/models", nil)
|
||
|
||
h.HandleProxy(c)
|
||
assert.Equal(t, 200, w.Code)
|
||
}
|
||
|
||
func TestProxyHandler_ForwardPassthrough_UnsupportedProtocol(t *testing.T) {
|
||
engine := setupProxyEngine(t)
|
||
routingSvc := &mockProxyRoutingService{err: appErrors.ErrModelNotFound}
|
||
providerSvc := &mockProxyProviderService{}
|
||
h := newTestProxyHandler(engine, &mockProxyProviderClient{}, routingSvc, providerSvc)
|
||
|
||
w := httptest.NewRecorder()
|
||
c, _ := gin.CreateTestContext(w)
|
||
c.Params = gin.Params{{Key: "protocol", Value: "unknown"}, {Key: "path", Value: "/models"}}
|
||
c.Request = httptest.NewRequest("GET", "/unknown/v1/models", nil)
|
||
|
||
h.HandleProxy(c)
|
||
assert.Equal(t, 400, w.Code)
|
||
}
|
||
|
||
func TestProxyHandler_ForwardPassthrough_NoProviders(t *testing.T) {
|
||
engine := setupProxyEngine(t)
|
||
routingSvc := &mockProxyRoutingService{err: appErrors.ErrModelNotFound}
|
||
providerSvc := &mockProxyProviderService{providers: []domain.Provider{}}
|
||
h := newTestProxyHandler(engine, &mockProxyProviderClient{}, routingSvc, providerSvc)
|
||
|
||
w := httptest.NewRecorder()
|
||
c, _ := gin.CreateTestContext(w)
|
||
c.Params = gin.Params{{Key: "protocol", Value: "openai"}, {Key: "path", Value: "/models"}}
|
||
c.Request = httptest.NewRequest("GET", "/openai/v1/models", nil)
|
||
|
||
h.HandleProxy(c)
|
||
// Models 接口现在本地聚合,返回空列表 200
|
||
assert.Equal(t, 200, w.Code)
|
||
}
|
||
|
||
func TestExtractHeaders(t *testing.T) {
|
||
w := httptest.NewRecorder()
|
||
c, _ := gin.CreateTestContext(w)
|
||
c.Request = httptest.NewRequest("POST", "/", nil)
|
||
c.Request.Header.Set("Authorization", "Bearer test")
|
||
c.Request.Header.Set("Content-Type", "application/json")
|
||
|
||
headers := extractHeaders(c)
|
||
assert.Equal(t, "Bearer test", headers["Authorization"])
|
||
assert.Equal(t, "application/json", headers["Content-Type"])
|
||
}
|
||
|
||
func TestProxyHandler_HandleProxy_ProviderProtocolDefault(t *testing.T) {
|
||
engine := setupProxyEngine(t)
|
||
routingSvc := &mockProxyRoutingService{
|
||
result: &domain.RouteResult{
|
||
Provider: &domain.Provider{ID: "p1", Name: "Test", APIKey: "sk-test", BaseURL: "https://api.test.com", Protocol: "", Enabled: true},
|
||
Model: &domain.Model{ID: "m1", ProviderID: "p1", ModelName: "gpt-4", Enabled: true},
|
||
},
|
||
}
|
||
client := &mockProxyProviderClient{
|
||
sendFn: func(ctx context.Context, spec conversion.HTTPRequestSpec) (*conversion.HTTPResponseSpec, error) {
|
||
return &conversion.HTTPResponseSpec{
|
||
StatusCode: 200,
|
||
Body: []byte(`{"id":"r1","object":"chat.completion","model":"gpt-4","choices":[{"index":0,"message":{"role":"assistant","content":"ok"},"finish_reason":"stop"}],"usage":{"prompt_tokens":1,"completion_tokens":1,"total_tokens":2}}`),
|
||
}, nil
|
||
},
|
||
}
|
||
h := newTestProxyHandler(engine, client, routingSvc, &mockProxyProviderService{})
|
||
|
||
w := httptest.NewRecorder()
|
||
c, _ := gin.CreateTestContext(w)
|
||
c.Params = gin.Params{{Key: "protocol", Value: "openai"}, {Key: "path", Value: "/chat/completions"}}
|
||
c.Request = httptest.NewRequest("POST", "/openai/v1/chat/completions", bytes.NewReader([]byte(`{"model":"gpt-4","messages":[{"role":"user","content":"hi"}]}`)))
|
||
|
||
h.HandleProxy(c)
|
||
assert.Equal(t, 200, w.Code)
|
||
}
|
||
|
||
func TestProxyHandler_WriteConversionError_NonConversionError(t *testing.T) {
|
||
engine := setupProxyEngine(t)
|
||
h := newTestProxyHandler(engine, &mockProxyProviderClient{}, &mockProxyRoutingService{}, &mockProxyProviderService{})
|
||
|
||
w := httptest.NewRecorder()
|
||
c, _ := gin.CreateTestContext(w)
|
||
c.Request = httptest.NewRequest("POST", "/", nil)
|
||
|
||
h.writeConversionError(c, context.DeadlineExceeded, "openai")
|
||
assert.Equal(t, 500, w.Code)
|
||
}
|
||
|
||
// TestProxyHandler_WriteConversionError_ConversionError checks the mapping of
// a typed ConversionError through writeConversionError.
//
// NOTE(review): asserting 500 for ErrorCodeInvalidInput looks surprising —
// invalid client input would conventionally map to 400. Confirm the intended
// status mapping in writeConversionError; if 400 is intended there, this
// assertion is pinning a bug.
func TestProxyHandler_WriteConversionError_ConversionError(t *testing.T) {
	engine := setupProxyEngine(t)
	h := newTestProxyHandler(engine, &mockProxyProviderClient{}, &mockProxyRoutingService{}, &mockProxyProviderService{})

	w := httptest.NewRecorder()
	c, _ := gin.CreateTestContext(w)
	c.Request = httptest.NewRequest("POST", "/", nil)

	convErr := conversion.NewConversionError(conversion.ErrorCodeInvalidInput, "bad request")
	h.writeConversionError(c, convErr, "openai")
	assert.Equal(t, 500, w.Code)
}
func TestProxyHandler_HandleProxy_EmptyBody(t *testing.T) {
|
||
engine := setupProxyEngine(t)
|
||
routingSvc := &mockProxyRoutingService{err: appErrors.ErrModelNotFound}
|
||
providerSvc := &mockProxyProviderService{
|
||
providers: []domain.Provider{
|
||
{ID: "p1", Name: "Test", APIKey: "sk-test", BaseURL: "https://api.test.com", Protocol: "openai"},
|
||
},
|
||
}
|
||
client := &mockProxyProviderClient{
|
||
sendFn: func(ctx context.Context, spec conversion.HTTPRequestSpec) (*conversion.HTTPResponseSpec, error) {
|
||
return &conversion.HTTPResponseSpec{
|
||
StatusCode: 200,
|
||
Body: []byte(`{"object":"list","data":[]}`),
|
||
}, nil
|
||
},
|
||
}
|
||
h := newTestProxyHandler(engine, client, routingSvc, providerSvc)
|
||
|
||
w := httptest.NewRecorder()
|
||
c, _ := gin.CreateTestContext(w)
|
||
c.Params = gin.Params{{Key: "protocol", Value: "openai"}, {Key: "path", Value: "/models"}}
|
||
c.Request = httptest.NewRequest("GET", "/openai/v1/models", nil)
|
||
|
||
h.HandleProxy(c)
|
||
assert.Equal(t, 200, w.Code)
|
||
}
|
||
|
||
// TestProxyHandler_HandleStream_MidStreamError verifies that when the upstream
// stream errors after some data has already been sent, the handler keeps the
// already-committed 200/SSE response and the delivered chunk reaches the
// client. The exact ordering of channel sends (data first, then the error) is
// the behavior under test, so the event sequence must not be reordered.
func TestProxyHandler_HandleStream_MidStreamError(t *testing.T) {
	engine := setupProxyEngine(t)
	routingSvc := &mockProxyRoutingService{
		result: &domain.RouteResult{
			Provider: &domain.Provider{ID: "p1", Name: "Test", APIKey: "sk-test", BaseURL: "https://api.test.com", Protocol: "openai", Enabled: true},
			Model:    &domain.Model{ID: "m1", ProviderID: "p1", ModelName: "gpt-4", Enabled: true},
		},
	}
	client := &mockProxyProviderClient{
		sendStreamFn: func(ctx context.Context, spec conversion.HTTPRequestSpec) (<-chan provider.StreamEvent, error) {
			ch := make(chan provider.StreamEvent, 10)
			go func() {
				defer close(ch)
				// One good chunk, then a transport-level failure.
				ch <- provider.StreamEvent{Data: []byte("data: {\"id\":\"1\",\"object\":\"chat.completion.chunk\",\"choices\":[{\"index\":0,\"delta\":{\"content\":\"Hello\"}}]}\n\n")}
				ch <- provider.StreamEvent{Error: fmt.Errorf("connection reset by peer")}
			}()
			return ch, nil
		},
	}
	h := newTestProxyHandler(engine, client, routingSvc, &mockProxyProviderService{})

	w := httptest.NewRecorder()
	c, _ := gin.CreateTestContext(w)
	c.Params = gin.Params{{Key: "protocol", Value: "openai"}, {Key: "path", Value: "/chat/completions"}}
	c.Request = httptest.NewRequest("POST", "/openai/v1/chat/completions", bytes.NewReader([]byte(`{"model":"gpt-4","messages":[{"role":"user","content":"hi"}],"stream":true}`)))

	h.HandleProxy(c)
	// Headers were already written before the error, so the status stays 200.
	assert.Equal(t, 200, w.Code)
	assert.Equal(t, "text/event-stream", w.Header().Get("Content-Type"))
	body := w.Body.String()
	assert.Contains(t, body, "Hello")
}
// TestProxyHandler_HandleStream_FlushOutput verifies the SSE response headers
// (Content-Type / Cache-Control / Connection) and that both the data chunk and
// the [DONE] terminator are flushed to the client. The event ordering in the
// goroutine mirrors a real upstream stream and should be kept as-is.
func TestProxyHandler_HandleStream_FlushOutput(t *testing.T) {
	engine := setupProxyEngine(t)
	routingSvc := &mockProxyRoutingService{
		result: &domain.RouteResult{
			Provider: &domain.Provider{ID: "p1", Name: "Test", APIKey: "sk-test", BaseURL: "https://api.test.com", Protocol: "openai", Enabled: true},
			Model:    &domain.Model{ID: "m1", ProviderID: "p1", ModelName: "gpt-4", Enabled: true},
		},
	}
	client := &mockProxyProviderClient{
		sendStreamFn: func(ctx context.Context, spec conversion.HTTPRequestSpec) (<-chan provider.StreamEvent, error) {
			ch := make(chan provider.StreamEvent, 10)
			go func() {
				defer close(ch)
				ch <- provider.StreamEvent{Data: []byte("data: {\"id\":\"1\",\"object\":\"chat.completion.chunk\",\"choices\":[{\"index\":0,\"delta\":{\"content\":\"Hi\"}}]}\n\n")}
				ch <- provider.StreamEvent{Data: []byte("data: [DONE]\n\n")}
				ch <- provider.StreamEvent{Done: true}
			}()
			return ch, nil
		},
	}
	h := newTestProxyHandler(engine, client, routingSvc, &mockProxyProviderService{})

	w := httptest.NewRecorder()
	c, _ := gin.CreateTestContext(w)
	c.Params = gin.Params{{Key: "protocol", Value: "openai"}, {Key: "path", Value: "/chat/completions"}}
	c.Request = httptest.NewRequest("POST", "/openai/v1/chat/completions", bytes.NewReader([]byte(`{"model":"gpt-4","messages":[{"role":"user","content":"hi"}],"stream":true}`)))

	h.HandleProxy(c)
	assert.Equal(t, 200, w.Code)
	// Standard SSE headers the handler must set before streaming.
	assert.Equal(t, "text/event-stream", w.Header().Get("Content-Type"))
	assert.Equal(t, "no-cache", w.Header().Get("Cache-Control"))
	assert.Equal(t, "keep-alive", w.Header().Get("Connection"))
	body := w.Body.String()
	assert.Contains(t, body, "Hi")
	assert.Contains(t, body, "[DONE]")
}
func TestProxyHandler_HandleStream_CreateStreamConverterError(t *testing.T) {
|
||
registry := conversion.NewMemoryRegistry()
|
||
engine := conversion.NewConversionEngine(registry, nil)
|
||
err := registry.Register(openai.NewAdapter())
|
||
require.NoError(t, err)
|
||
|
||
routingSvc := &mockProxyRoutingService{
|
||
result: &domain.RouteResult{
|
||
Provider: &domain.Provider{ID: "p1", Name: "Test", APIKey: "sk-test", BaseURL: "https://api.test.com", Protocol: "nonexistent", Enabled: true},
|
||
Model: &domain.Model{ID: "m1", ProviderID: "p1", ModelName: "gpt-4", Enabled: true},
|
||
},
|
||
}
|
||
h := newTestProxyHandler(engine, &mockProxyProviderClient{}, routingSvc, &mockProxyProviderService{})
|
||
|
||
w := httptest.NewRecorder()
|
||
c, _ := gin.CreateTestContext(w)
|
||
c.Params = gin.Params{{Key: "protocol", Value: "openai"}, {Key: "path", Value: "/chat/completions"}}
|
||
c.Request = httptest.NewRequest("POST", "/openai/v1/chat/completions", bytes.NewReader([]byte(`{"model":"gpt-4","messages":[{"role":"user","content":"hi"}],"stream":true}`)))
|
||
|
||
h.HandleProxy(c)
|
||
assert.Equal(t, 500, w.Code)
|
||
}
|
||
|
||
// TestProxyHandler_HandleStream_ConvertRequestError expects a 500 when the
// request cannot be converted for the target protocol.
//
// NOTE(review): this test's setup is effectively identical to
// TestProxyHandler_HandleStream_CreateStreamConverterError above (only the
// OpenAI adapter registered, provider protocol "nonexistent"), so both tests
// exercise the same failure path. Consider differentiating the setup (e.g. a
// registered target adapter with an unconvertible request body) or removing
// one of the duplicates.
func TestProxyHandler_HandleStream_ConvertRequestError(t *testing.T) {
	registry := conversion.NewMemoryRegistry()
	engine := conversion.NewConversionEngine(registry, nil)
	require.NoError(t, registry.Register(openai.NewAdapter()))

	routingSvc := &mockProxyRoutingService{
		result: &domain.RouteResult{
			Provider: &domain.Provider{ID: "p1", Name: "Test", APIKey: "sk-test", BaseURL: "https://api.test.com", Protocol: "nonexistent", Enabled: true},
			Model:    &domain.Model{ID: "m1", ProviderID: "p1", ModelName: "gpt-4", Enabled: true},
		},
	}
	h := newTestProxyHandler(engine, &mockProxyProviderClient{}, routingSvc, &mockProxyProviderService{})

	w := httptest.NewRecorder()
	c, _ := gin.CreateTestContext(w)
	c.Params = gin.Params{{Key: "protocol", Value: "openai"}, {Key: "path", Value: "/chat/completions"}}
	c.Request = httptest.NewRequest("POST", "/openai/v1/chat/completions", bytes.NewReader([]byte(`{"model":"gpt-4","messages":[{"role":"user","content":"hi"}],"stream":true}`)))

	h.HandleProxy(c)
	assert.Equal(t, 500, w.Code)
}
func TestProxyHandler_HandleNonStream_ConvertResponseError(t *testing.T) {
|
||
registry := conversion.NewMemoryRegistry()
|
||
engine := conversion.NewConversionEngine(registry, nil)
|
||
require.NoError(t, registry.Register(openai.NewAdapter()))
|
||
require.NoError(t, registry.Register(anthropic.NewAdapter()))
|
||
|
||
routingSvc := &mockProxyRoutingService{
|
||
result: &domain.RouteResult{
|
||
Provider: &domain.Provider{ID: "p1", Name: "Test", APIKey: "sk-test", BaseURL: "https://api.test.com", Protocol: "anthropic", Enabled: true},
|
||
Model: &domain.Model{ID: "m1", ProviderID: "p1", ModelName: "claude-3", Enabled: true},
|
||
},
|
||
}
|
||
client := &mockProxyProviderClient{
|
||
sendFn: func(ctx context.Context, spec conversion.HTTPRequestSpec) (*conversion.HTTPResponseSpec, error) {
|
||
return &conversion.HTTPResponseSpec{
|
||
StatusCode: 200,
|
||
Headers: map[string]string{"Content-Type": "application/json"},
|
||
Body: []byte(`invalid json`),
|
||
}, nil
|
||
},
|
||
}
|
||
h := newTestProxyHandler(engine, client, routingSvc, &mockProxyProviderService{})
|
||
|
||
w := httptest.NewRecorder()
|
||
c, _ := gin.CreateTestContext(w)
|
||
c.Params = gin.Params{{Key: "protocol", Value: "openai"}, {Key: "path", Value: "/chat/completions"}}
|
||
c.Request = httptest.NewRequest("POST", "/openai/v1/chat/completions", bytes.NewReader([]byte(`{"model":"claude-3","messages":[{"role":"user","content":"hi"}]}`)))
|
||
|
||
h.HandleProxy(c)
|
||
assert.Equal(t, 500, w.Code)
|
||
}
|
||
|
||
func TestProxyHandler_HandleNonStream_ResponseHeaders(t *testing.T) {
|
||
engine := setupProxyEngine(t)
|
||
routingSvc := &mockProxyRoutingService{
|
||
result: &domain.RouteResult{
|
||
Provider: &domain.Provider{ID: "p1", Name: "Test", APIKey: "sk-test", BaseURL: "https://api.test.com", Protocol: "openai", Enabled: true},
|
||
Model: &domain.Model{ID: "m1", ProviderID: "p1", ModelName: "gpt-4", Enabled: true},
|
||
},
|
||
}
|
||
client := &mockProxyProviderClient{
|
||
sendFn: func(ctx context.Context, spec conversion.HTTPRequestSpec) (*conversion.HTTPResponseSpec, error) {
|
||
return &conversion.HTTPResponseSpec{
|
||
StatusCode: 200,
|
||
Headers: map[string]string{"Content-Type": "application/json", "X-Custom": "test-value"},
|
||
Body: []byte(`{"id":"r1","object":"chat.completion","model":"gpt-4","choices":[{"index":0,"message":{"role":"assistant","content":"ok"},"finish_reason":"stop"}],"usage":{"prompt_tokens":1,"completion_tokens":1,"total_tokens":2}}`),
|
||
}, nil
|
||
},
|
||
}
|
||
h := newTestProxyHandler(engine, client, routingSvc, &mockProxyProviderService{})
|
||
|
||
w := httptest.NewRecorder()
|
||
c, _ := gin.CreateTestContext(w)
|
||
c.Params = gin.Params{{Key: "protocol", Value: "openai"}, {Key: "path", Value: "/chat/completions"}}
|
||
c.Request = httptest.NewRequest("POST", "/openai/v1/chat/completions", bytes.NewReader([]byte(`{"model":"gpt-4","messages":[{"role":"user","content":"hi"}]}`)))
|
||
|
||
h.HandleProxy(c)
|
||
assert.Equal(t, 200, w.Code)
|
||
assert.Equal(t, "test-value", w.Header().Get("X-Custom"))
|
||
assert.Equal(t, "application/json", w.Header().Get("Content-Type"))
|
||
}
|
||
|
||
func TestProxyHandler_ForwardPassthrough_CrossProtocol(t *testing.T) {
|
||
registry := conversion.NewMemoryRegistry()
|
||
engine := conversion.NewConversionEngine(registry, nil)
|
||
require.NoError(t, registry.Register(openai.NewAdapter()))
|
||
|
||
anthropicAdapter := anthropic.NewAdapter()
|
||
require.NoError(t, registry.Register(anthropicAdapter))
|
||
|
||
routingSvc := &mockProxyRoutingService{err: appErrors.ErrModelNotFound}
|
||
providerSvc := &mockProxyProviderService{
|
||
providers: []domain.Provider{
|
||
{ID: "p1", Name: "Test", APIKey: "sk-test", BaseURL: "https://api.test.com", Protocol: "anthropic"},
|
||
},
|
||
}
|
||
client := &mockProxyProviderClient{
|
||
sendFn: func(ctx context.Context, spec conversion.HTTPRequestSpec) (*conversion.HTTPResponseSpec, error) {
|
||
return &conversion.HTTPResponseSpec{
|
||
StatusCode: 200,
|
||
Headers: map[string]string{"Content-Type": "application/json"},
|
||
Body: []byte(`{"object":"list","data":[]}`),
|
||
}, nil
|
||
},
|
||
}
|
||
h := newTestProxyHandler(engine, client, routingSvc, providerSvc)
|
||
|
||
w := httptest.NewRecorder()
|
||
c, _ := gin.CreateTestContext(w)
|
||
c.Params = gin.Params{{Key: "protocol", Value: "openai"}, {Key: "path", Value: "/models"}}
|
||
c.Request = httptest.NewRequest("GET", "/openai/v1/models", nil)
|
||
|
||
h.HandleProxy(c)
|
||
assert.Equal(t, 200, w.Code)
|
||
}
|
||
|
||
// TestProxyHandler_ForwardPassthrough_NoBody_NoModel checks that a GET with
// neither a body nor a routable model still succeeds via passthrough.
//
// NOTE(review): this is setup-for-setup identical to
// TestProxyHandler_ForwardPassthrough_GET above; consider merging the two or
// differentiating what each one asserts.
func TestProxyHandler_ForwardPassthrough_NoBody_NoModel(t *testing.T) {
	engine := setupProxyEngine(t)
	routingSvc := &mockProxyRoutingService{err: appErrors.ErrModelNotFound}
	providerSvc := &mockProxyProviderService{
		providers: []domain.Provider{
			{ID: "p1", Name: "Test", APIKey: "sk-test", BaseURL: "https://api.test.com", Protocol: "openai"},
		},
	}
	client := &mockProxyProviderClient{
		sendFn: func(ctx context.Context, spec conversion.HTTPRequestSpec) (*conversion.HTTPResponseSpec, error) {
			return &conversion.HTTPResponseSpec{
				StatusCode: 200,
				Headers:    map[string]string{"Content-Type": "application/json"},
				Body:       []byte(`{"object":"list","data":[{"id":"gpt-4","object":"model"}]}`),
			}, nil
		},
	}
	h := newTestProxyHandler(engine, client, routingSvc, providerSvc)

	w := httptest.NewRecorder()
	c, _ := gin.CreateTestContext(w)
	c.Params = gin.Params{{Key: "protocol", Value: "openai"}, {Key: "path", Value: "/models"}}
	c.Request = httptest.NewRequest("GET", "/openai/v1/models", nil)

	h.HandleProxy(c)
	assert.Equal(t, 200, w.Code)
}
func TestIsStreamRequest_EdgeCases(t *testing.T) {
|
||
engine := setupProxyEngine(t)
|
||
h := newTestProxyHandler(engine, &mockProxyProviderClient{}, &mockProxyRoutingService{}, &mockProxyProviderService{})
|
||
|
||
tests := []struct {
|
||
name string
|
||
body string
|
||
path string
|
||
expected bool
|
||
}{
|
||
{"stream at end of JSON", `{"messages":[],"stream":true}`, "/v1/chat/completions", true},
|
||
{"stream with spaces", `{"stream" : true}`, "/v1/chat/completions", true},
|
||
{"stream embedded in string value", `{"model":"stream:true"}`, "/v1/chat/completions", false},
|
||
{"empty body", "", "/v1/chat/completions", false},
|
||
{"stream true embeddings", `{"model":"text-emb","stream":true}`, "/v1/embeddings", false},
|
||
}
|
||
|
||
for _, tt := range tests {
|
||
t.Run(tt.name, func(t *testing.T) {
|
||
result := h.isStreamRequest([]byte(tt.body), "openai", tt.path)
|
||
assert.Equal(t, tt.expected, result)
|
||
})
|
||
}
|
||
}
|
||
|
||
func TestProxyHandler_WriteError_RouteError(t *testing.T) {
|
||
engine := setupProxyEngine(t)
|
||
h := newTestProxyHandler(engine, &mockProxyProviderClient{}, &mockProxyRoutingService{}, &mockProxyProviderService{})
|
||
|
||
w := httptest.NewRecorder()
|
||
c, _ := gin.CreateTestContext(w)
|
||
c.Request = httptest.NewRequest("POST", "/", nil)
|
||
|
||
h.writeError(c, fmt.Errorf("model not found"), "openai")
|
||
assert.Equal(t, 404, w.Code)
|
||
}
|
||
|
||
func TestProxyHandler_HandleProxy_RouteEmptyBody_NoModel(t *testing.T) {
|
||
engine := setupProxyEngine(t)
|
||
routingSvc := &mockProxyRoutingService{err: appErrors.ErrModelNotFound}
|
||
providerSvc := &mockProxyProviderService{
|
||
providers: []domain.Provider{
|
||
{ID: "p1", Name: "Test", APIKey: "sk-test", BaseURL: "https://api.test.com", Protocol: "openai"},
|
||
},
|
||
}
|
||
client := &mockProxyProviderClient{
|
||
sendFn: func(ctx context.Context, spec conversion.HTTPRequestSpec) (*conversion.HTTPResponseSpec, error) {
|
||
return &conversion.HTTPResponseSpec{
|
||
StatusCode: 200,
|
||
Body: []byte(`{"object":"list","data":[]}`),
|
||
}, nil
|
||
},
|
||
}
|
||
h := newTestProxyHandler(engine, client, routingSvc, providerSvc)
|
||
|
||
w := httptest.NewRecorder()
|
||
c, _ := gin.CreateTestContext(w)
|
||
c.Params = gin.Params{{Key: "protocol", Value: "openai"}, {Key: "path", Value: "/models"}}
|
||
c.Request = httptest.NewRequest("GET", "/openai/v1/models", nil)
|
||
|
||
h.HandleProxy(c)
|
||
assert.Equal(t, 200, w.Code)
|
||
}
|
||
|
||
// ============ isStreamRequest tests ============
|
||
|
||
func TestIsStreamRequest(t *testing.T) {
|
||
engine := setupProxyEngine(t)
|
||
h := &ProxyHandler{engine: engine}
|
||
|
||
tests := []struct {
|
||
name string
|
||
body []byte
|
||
clientProtocol string
|
||
nativePath string
|
||
expected bool
|
||
}{
|
||
{
|
||
name: "stream true",
|
||
body: []byte(`{"model": "gpt-4", "stream": true}`),
|
||
clientProtocol: "openai",
|
||
nativePath: "/v1/chat/completions",
|
||
expected: true,
|
||
},
|
||
{
|
||
name: "stream false",
|
||
body: []byte(`{"model": "gpt-4", "stream": false}`),
|
||
clientProtocol: "openai",
|
||
nativePath: "/v1/chat/completions",
|
||
expected: false,
|
||
},
|
||
{
|
||
name: "no stream field",
|
||
body: []byte(`{"model": "gpt-4"}`),
|
||
clientProtocol: "openai",
|
||
nativePath: "/v1/chat/completions",
|
||
expected: false,
|
||
},
|
||
{
|
||
name: "invalid json",
|
||
body: []byte(`{invalid}`),
|
||
clientProtocol: "openai",
|
||
nativePath: "/v1/chat/completions",
|
||
expected: false,
|
||
},
|
||
{
|
||
name: "not chat endpoint",
|
||
body: []byte(`{"model": "gpt-4", "stream": true}`),
|
||
clientProtocol: "openai",
|
||
nativePath: "/v1/models",
|
||
expected: false,
|
||
},
|
||
{
|
||
name: "anthropic stream",
|
||
body: []byte(`{"model": "claude-3", "stream": true}`),
|
||
clientProtocol: "anthropic",
|
||
nativePath: "/v1/messages",
|
||
expected: true,
|
||
},
|
||
}
|
||
for _, tt := range tests {
|
||
t.Run(tt.name, func(t *testing.T) {
|
||
result := h.isStreamRequest(tt.body, tt.clientProtocol, tt.nativePath)
|
||
assert.Equal(t, tt.expected, result)
|
||
})
|
||
}
|
||
}
|
||
|
||
// ============ Models / ModelInfo local aggregation tests ============
|
||
|
||
func TestProxyHandler_HandleProxy_Models_LocalAggregation(t *testing.T) {
|
||
engine := setupProxyEngine(t)
|
||
providerSvc := &mockProxyProviderService{
|
||
enabledModels: []domain.Model{
|
||
{ID: "m1", ProviderID: "openai", ModelName: "gpt-4", Enabled: true},
|
||
{ID: "m2", ProviderID: "anthropic", ModelName: "claude-3", Enabled: true},
|
||
},
|
||
}
|
||
h := newTestProxyHandler(engine, &mockProxyProviderClient{}, &mockProxyRoutingService{}, providerSvc)
|
||
|
||
w := httptest.NewRecorder()
|
||
c, _ := gin.CreateTestContext(w)
|
||
c.Params = gin.Params{{Key: "protocol", Value: "openai"}, {Key: "path", Value: "/models"}}
|
||
c.Request = httptest.NewRequest("GET", "/openai/v1/models", nil)
|
||
|
||
h.HandleProxy(c)
|
||
assert.Equal(t, 200, w.Code)
|
||
|
||
var resp map[string]interface{}
|
||
require.NoError(t, json.Unmarshal(w.Body.Bytes(), &resp))
|
||
data, ok := resp["data"].([]interface{})
|
||
require.True(t, ok)
|
||
assert.Len(t, data, 2)
|
||
|
||
// 验证统一模型 ID 格式
|
||
first := data[0].(map[string]interface{})
|
||
assert.Equal(t, "openai/gpt-4", first["id"])
|
||
}
|
||
|
||
func TestProxyHandler_HandleProxy_ModelInfo_LocalQuery(t *testing.T) {
|
||
engine := setupProxyEngine(t)
|
||
providerSvc := &mockProxyProviderService{
|
||
modelByProvName: &domain.Model{ID: "m1", ProviderID: "openai", ModelName: "gpt-4", Enabled: true},
|
||
}
|
||
h := newTestProxyHandler(engine, &mockProxyProviderClient{}, &mockProxyRoutingService{}, providerSvc)
|
||
|
||
w := httptest.NewRecorder()
|
||
c, _ := gin.CreateTestContext(w)
|
||
c.Params = gin.Params{{Key: "protocol", Value: "openai"}, {Key: "path", Value: "/models/openai/gpt-4"}}
|
||
c.Request = httptest.NewRequest("GET", "/openai/v1/models/openai/gpt-4", nil)
|
||
|
||
h.HandleProxy(c)
|
||
assert.Equal(t, 200, w.Code)
|
||
|
||
var resp map[string]interface{}
|
||
require.NoError(t, json.Unmarshal(w.Body.Bytes(), &resp))
|
||
assert.Equal(t, "openai/gpt-4", resp["id"])
|
||
}
|
||
|
||
func TestProxyHandler_HandleProxy_Models_EmptySuffix_ForwardPassthrough(t *testing.T) {
|
||
engine := setupProxyEngine(t)
|
||
providerSvc := &mockProxyProviderService{
|
||
providers: []domain.Provider{
|
||
{ID: "p1", Name: "Test", APIKey: "sk-test", BaseURL: "https://api.test.com", Protocol: "openai"},
|
||
},
|
||
}
|
||
client := &mockProxyProviderClient{
|
||
sendFn: func(ctx context.Context, spec conversion.HTTPRequestSpec) (*conversion.HTTPResponseSpec, error) {
|
||
return &conversion.HTTPResponseSpec{
|
||
StatusCode: 200,
|
||
Body: []byte(`{"object":"list","data":[]}`),
|
||
}, nil
|
||
},
|
||
}
|
||
h := newTestProxyHandler(engine, client, &mockProxyRoutingService{err: appErrors.ErrModelNotFound}, providerSvc)
|
||
|
||
w := httptest.NewRecorder()
|
||
c, _ := gin.CreateTestContext(w)
|
||
c.Params = gin.Params{{Key: "protocol", Value: "openai"}, {Key: "path", Value: "/models/"}}
|
||
c.Request = httptest.NewRequest("GET", "/openai/v1/models/", nil)
|
||
|
||
h.HandleProxy(c)
|
||
assert.Equal(t, 200, w.Code)
|
||
}
|
||
|
||
// ============ Smart Passthrough unified model ID routing tests ============
|
||
|
||
func TestProxyHandler_HandleProxy_SmartPassthrough_UnifiedID(t *testing.T) {
|
||
engine := setupProxyEngine(t)
|
||
routingSvc := &mockProxyRoutingService{
|
||
result: &domain.RouteResult{
|
||
Provider: &domain.Provider{ID: "openai_p", Name: "Test", APIKey: "sk-test", BaseURL: "https://api.test.com", Protocol: "openai", Enabled: true},
|
||
Model: &domain.Model{ID: "m1", ProviderID: "openai_p", ModelName: "gpt-4", Enabled: true},
|
||
},
|
||
}
|
||
client := &mockProxyProviderClient{
|
||
sendFn: func(ctx context.Context, spec conversion.HTTPRequestSpec) (*conversion.HTTPResponseSpec, error) {
|
||
// 验证请求体中的 model 已被改写为上游模型名
|
||
var req map[string]interface{}
|
||
json.Unmarshal(spec.Body, &req)
|
||
assert.Equal(t, "gpt-4", req["model"])
|
||
|
||
return &conversion.HTTPResponseSpec{
|
||
StatusCode: 200,
|
||
Headers: map[string]string{"Content-Type": "application/json"},
|
||
Body: []byte(`{"id":"resp-1","object":"chat.completion","model":"gpt-4","choices":[{"index":0,"message":{"role":"assistant","content":"Hello"},"finish_reason":"stop"}],"usage":{"prompt_tokens":5,"completion_tokens":3,"total_tokens":8}}`),
|
||
}, nil
|
||
},
|
||
}
|
||
h := newTestProxyHandler(engine, client, routingSvc, &mockProxyProviderService{})
|
||
|
||
w := httptest.NewRecorder()
|
||
c, _ := gin.CreateTestContext(w)
|
||
c.Params = gin.Params{{Key: "protocol", Value: "openai"}, {Key: "path", Value: "/chat/completions"}}
|
||
// 客户端发送统一模型 ID
|
||
c.Request = httptest.NewRequest("POST", "/openai/v1/chat/completions", bytes.NewReader([]byte(`{"model":"openai_p/gpt-4","messages":[{"role":"user","content":"hi"}]}`)))
|
||
|
||
h.HandleProxy(c)
|
||
assert.Equal(t, 200, w.Code)
|
||
|
||
// 验证响应中的 model 已被改写为统一模型 ID
|
||
var resp map[string]interface{}
|
||
require.NoError(t, json.Unmarshal(w.Body.Bytes(), &resp))
|
||
assert.Equal(t, "openai_p/gpt-4", resp["model"])
|
||
}
|
||
|
||
// ============ Cross-protocol unified model ID routing tests ============
|
||
|
||
func TestProxyHandler_HandleProxy_CrossProtocol_NonStream_UnifiedID(t *testing.T) {
|
||
engine := setupProxyEngine(t)
|
||
routingSvc := &mockProxyRoutingService{
|
||
result: &domain.RouteResult{
|
||
Provider: &domain.Provider{ID: "anthropic_p", Name: "Anthropic", APIKey: "sk-test", BaseURL: "https://api.anthropic.com", Protocol: "anthropic", Enabled: true},
|
||
Model: &domain.Model{ID: "m1", ProviderID: "anthropic_p", ModelName: "claude-3", Enabled: true},
|
||
},
|
||
}
|
||
client := &mockProxyProviderClient{
|
||
sendFn: func(ctx context.Context, spec conversion.HTTPRequestSpec) (*conversion.HTTPResponseSpec, error) {
|
||
return &conversion.HTTPResponseSpec{
|
||
StatusCode: 200,
|
||
Headers: map[string]string{"Content-Type": "application/json"},
|
||
Body: []byte(`{"id":"msg-1","type":"message","role":"assistant","model":"claude-3","content":[{"type":"text","text":"Hello"}],"stop_reason":"end_turn","usage":{"input_tokens":5,"output_tokens":3}}`),
|
||
}, nil
|
||
},
|
||
}
|
||
h := newTestProxyHandler(engine, client, routingSvc, &mockProxyProviderService{})
|
||
|
||
w := httptest.NewRecorder()
|
||
c, _ := gin.CreateTestContext(w)
|
||
c.Params = gin.Params{{Key: "protocol", Value: "openai"}, {Key: "path", Value: "/chat/completions"}}
|
||
// OpenAI 客户端使用统一模型 ID 路由到 Anthropic 供应商
|
||
c.Request = httptest.NewRequest("POST", "/openai/v1/chat/completions", bytes.NewReader([]byte(`{"model":"anthropic_p/claude-3","messages":[{"role":"user","content":"hi"}]}`)))
|
||
|
||
h.HandleProxy(c)
|
||
assert.Equal(t, 200, w.Code)
|
||
|
||
// 验证跨协议转换后响应中的 model 被覆写为统一模型 ID
|
||
var resp map[string]interface{}
|
||
require.NoError(t, json.Unmarshal(w.Body.Bytes(), &resp))
|
||
assert.Equal(t, "anthropic_p/claude-3", resp["model"])
|
||
}
|
||
|
||
func TestProxyHandler_HandleProxy_CrossProtocol_Stream_UnifiedID(t *testing.T) {
|
||
engine := setupProxyEngine(t)
|
||
routingSvc := &mockProxyRoutingService{
|
||
result: &domain.RouteResult{
|
||
Provider: &domain.Provider{ID: "anthropic_p", Name: "Anthropic", APIKey: "sk-test", BaseURL: "https://api.anthropic.com", Protocol: "anthropic", Enabled: true},
|
||
Model: &domain.Model{ID: "m1", ProviderID: "anthropic_p", ModelName: "claude-3", Enabled: true},
|
||
},
|
||
}
|
||
client := &mockProxyProviderClient{
|
||
sendStreamFn: func(ctx context.Context, spec conversion.HTTPRequestSpec) (<-chan provider.StreamEvent, error) {
|
||
ch := make(chan provider.StreamEvent, 10)
|
||
go func() {
|
||
defer close(ch)
|
||
ch <- provider.StreamEvent{Data: []byte(`event: message_start
|
||
data: {"type":"message_start","message":{"id":"msg-1","type":"message","role":"assistant","model":"claude-3","content":[]}}
|
||
|
||
`)}
|
||
ch <- provider.StreamEvent{Data: []byte(`event: content_block_delta
|
||
data: {"type":"content_block_delta","index":0,"delta":{"type":"text_delta","text":"Hi"}}
|
||
|
||
`)}
|
||
ch <- provider.StreamEvent{Data: []byte(`event: message_stop
|
||
data: {"type":"message_stop"}
|
||
|
||
`)}
|
||
ch <- provider.StreamEvent{Done: true}
|
||
}()
|
||
return ch, nil
|
||
},
|
||
}
|
||
h := newTestProxyHandler(engine, client, routingSvc, &mockProxyProviderService{})
|
||
|
||
w := httptest.NewRecorder()
|
||
c, _ := gin.CreateTestContext(w)
|
||
c.Params = gin.Params{{Key: "protocol", Value: "openai"}, {Key: "path", Value: "/chat/completions"}}
|
||
c.Request = httptest.NewRequest("POST", "/openai/v1/chat/completions", bytes.NewReader([]byte(`{"model":"anthropic_p/claude-3","messages":[{"role":"user","content":"hi"}],"stream":true}`)))
|
||
|
||
h.HandleProxy(c)
|
||
assert.Equal(t, 200, w.Code)
|
||
assert.Equal(t, "text/event-stream", w.Header().Get("Content-Type"))
|
||
|
||
body := w.Body.String()
|
||
// 验证跨协议流式中 model 被覆写为统一模型 ID
|
||
assert.Contains(t, body, "anthropic_p/claude-3", "跨协议流式响应中 model 应被覆写为统一模型 ID")
|
||
}
|
||
|
||
func TestProxyHandler_HandleProxy_SmartPassthrough_Fidelity(t *testing.T) {
|
||
engine := setupProxyEngine(t)
|
||
routingSvc := &mockProxyRoutingService{
|
||
result: &domain.RouteResult{
|
||
Provider: &domain.Provider{ID: "openai_p", Name: "Test", APIKey: "sk-test", BaseURL: "https://api.test.com", Protocol: "openai", Enabled: true},
|
||
Model: &domain.Model{ID: "m1", ProviderID: "openai_p", ModelName: "gpt-4", Enabled: true},
|
||
},
|
||
}
|
||
var capturedRequestBody []byte
|
||
client := &mockProxyProviderClient{
|
||
sendFn: func(ctx context.Context, spec conversion.HTTPRequestSpec) (*conversion.HTTPResponseSpec, error) {
|
||
capturedRequestBody = spec.Body
|
||
return &conversion.HTTPResponseSpec{
|
||
StatusCode: 200,
|
||
Headers: map[string]string{"Content-Type": "application/json"},
|
||
Body: []byte(`{"id":"resp-1","object":"chat.completion","model":"gpt-4","choices":[{"index":0,"message":{"role":"assistant","content":"Hello"},"finish_reason":"stop"}],"usage":{"prompt_tokens":5,"completion_tokens":3,"total_tokens":8},"unknown_field":"preserved"}`),
|
||
}, nil
|
||
},
|
||
}
|
||
h := newTestProxyHandler(engine, client, routingSvc, &mockProxyProviderService{})
|
||
|
||
w := httptest.NewRecorder()
|
||
c, _ := gin.CreateTestContext(w)
|
||
c.Params = gin.Params{{Key: "protocol", Value: "openai"}, {Key: "path", Value: "/chat/completions"}}
|
||
// 包含未知参数,验证 Smart Passthrough 保真性
|
||
c.Request = httptest.NewRequest("POST", "/openai/v1/chat/completions", bytes.NewReader([]byte(`{"model":"openai_p/gpt-4","messages":[{"role":"user","content":"hi"}],"custom_param":"should_be_preserved"}`)))
|
||
|
||
h.HandleProxy(c)
|
||
assert.Equal(t, 200, w.Code)
|
||
|
||
// 验证请求中 model 被改写为上游模型名,但未知参数保留
|
||
var reqBody map[string]interface{}
|
||
require.NoError(t, json.Unmarshal(capturedRequestBody, &reqBody))
|
||
assert.Equal(t, "gpt-4", reqBody["model"], "请求中 model 应被改写为上游模型名")
|
||
assert.Equal(t, "should_be_preserved", reqBody["custom_param"], "Smart Passthrough 应保留未知参数")
|
||
|
||
// 验证响应中 model 被改写为统一模型 ID,但未知参数保留
|
||
var resp map[string]interface{}
|
||
require.NoError(t, json.Unmarshal(w.Body.Bytes(), &resp))
|
||
assert.Equal(t, "openai_p/gpt-4", resp["model"], "响应中 model 应被改写为统一模型 ID")
|
||
assert.Equal(t, "preserved", resp["unknown_field"], "Smart Passthrough 应保留未知响应字段")
|
||
}
|
||
|
||
func TestProxyHandler_HandleProxy_UnifiedID_ModelNotFound(t *testing.T) {
|
||
engine := setupProxyEngine(t)
|
||
routingSvc := &mockProxyRoutingService{err: appErrors.ErrModelNotFound}
|
||
providerSvc := &mockProxyProviderService{
|
||
providers: []domain.Provider{
|
||
{ID: "p1", Name: "Test", APIKey: "sk-test", BaseURL: "https://api.test.com", Protocol: "openai"},
|
||
},
|
||
}
|
||
h := newTestProxyHandler(engine, &mockProxyProviderClient{}, routingSvc, providerSvc)
|
||
|
||
w := httptest.NewRecorder()
|
||
c, _ := gin.CreateTestContext(w)
|
||
c.Params = gin.Params{{Key: "protocol", Value: "openai"}, {Key: "path", Value: "/chat/completions"}}
|
||
// 使用统一模型 ID 格式但模型不存在
|
||
c.Request = httptest.NewRequest("POST", "/openai/v1/chat/completions", bytes.NewReader([]byte(`{"model":"unknown/model","messages":[{"role":"user","content":"hi"}]}`)))
|
||
|
||
h.HandleProxy(c)
|
||
assert.Equal(t, 404, w.Code)
|
||
|
||
var resp map[string]interface{}
|
||
require.NoError(t, json.Unmarshal(w.Body.Bytes(), &resp))
|
||
assert.Contains(t, resp, "error")
|
||
}
|