// Package handler tests — proxy handler test suite.
package handler
|
|
|
|
import (
|
|
"bytes"
|
|
"context"
|
|
"encoding/json"
|
|
"fmt"
|
|
"net/http/httptest"
|
|
"testing"
|
|
|
|
"github.com/gin-gonic/gin"
|
|
"github.com/stretchr/testify/assert"
|
|
"github.com/stretchr/testify/require"
|
|
"go.uber.org/mock/gomock"
|
|
|
|
"nex/backend/internal/conversion"
|
|
"nex/backend/internal/conversion/anthropic"
|
|
"nex/backend/internal/conversion/openai"
|
|
"nex/backend/internal/domain"
|
|
"nex/backend/internal/provider"
|
|
appErrors "nex/backend/pkg/errors"
|
|
"nex/backend/tests/mocks"
|
|
)
|
|
|
|
// init puts gin into test mode for the whole package so handlers under
// test do not emit debug logging or install release-mode middleware.
func init() {
	gin.SetMode(gin.TestMode)
}
|
|
|
|
|
|
|
|
func setupProxyEngine(t *testing.T) *conversion.ConversionEngine {
|
|
t.Helper()
|
|
registry := conversion.NewMemoryRegistry()
|
|
engine := conversion.NewConversionEngine(registry, nil)
|
|
require.NoError(t, registry.Register(openai.NewAdapter()))
|
|
require.NoError(t, registry.Register(anthropic.NewAdapter()))
|
|
return engine
|
|
}
|
|
|
|
func newTestProxyHandler(engine *conversion.ConversionEngine, client *mocks.MockProviderClient, routingSvc *mocks.MockRoutingService, providerSvc *mocks.MockProviderService, statsSvc *mocks.MockStatsService) *ProxyHandler {
|
|
return NewProxyHandler(
|
|
engine,
|
|
client,
|
|
routingSvc,
|
|
providerSvc,
|
|
statsSvc,
|
|
)
|
|
}
|
|
|
|
func TestProxyHandler_HandleProxy_MissingProtocol(t *testing.T) {
|
|
ctrl := gomock.NewController(t)
|
|
defer ctrl.Finish()
|
|
|
|
engine := setupProxyEngine(t)
|
|
client := mocks.NewMockProviderClient(ctrl)
|
|
routingSvc := mocks.NewMockRoutingService(ctrl)
|
|
providerSvc := mocks.NewMockProviderService(ctrl)
|
|
statsSvc := mocks.NewMockStatsService(ctrl)
|
|
h := newTestProxyHandler(engine, client, routingSvc, providerSvc, statsSvc)
|
|
|
|
w := httptest.NewRecorder()
|
|
c, _ := gin.CreateTestContext(w)
|
|
c.Request = httptest.NewRequest("POST", "/", bytes.NewReader([]byte(`{}`)))
|
|
|
|
h.HandleProxy(c)
|
|
assert.Equal(t, 400, w.Code)
|
|
}
|
|
|
|
// TestProxyHandler_HandleProxy_NonStreamSuccess covers the non-streaming
// happy path: routing resolves an openai provider, the mocked client
// returns a complete chat.completion payload, and the handler relays it
// to the caller with status 200 and the upstream response id intact.
func TestProxyHandler_HandleProxy_NonStreamSuccess(t *testing.T) {
	ctrl := gomock.NewController(t)
	defer ctrl.Finish()

	engine := setupProxyEngine(t)
	routingSvc := mocks.NewMockRoutingService(ctrl)
	// NOTE(review): routing is expected with empty-string arguments even
	// though the body names "gpt-4" — confirm how HandleProxy derives the
	// values it passes to RouteByModelName.
	routingSvc.EXPECT().RouteByModelName("", "").Return(&domain.RouteResult{
		Provider: &domain.Provider{ID: "p1", Name: "Test", APIKey: "sk-test", BaseURL: "https://api.test.com", Protocol: "openai", Enabled: true},
		Model:    &domain.Model{ID: "m1", ProviderID: "p1", ModelName: "gpt-4", Enabled: true},
	}, nil)
	client := mocks.NewMockProviderClient(ctrl)
	client.EXPECT().Send(gomock.Any(), gomock.Any()).DoAndReturn(func(ctx context.Context, spec conversion.HTTPRequestSpec) (*conversion.HTTPResponseSpec, error) {
		// Canned upstream success in OpenAI chat.completion format.
		return &conversion.HTTPResponseSpec{
			StatusCode: 200,
			Headers:    map[string]string{"Content-Type": "application/json"},
			Body:       []byte(`{"id":"resp-1","object":"chat.completion","model":"gpt-4","choices":[{"index":0,"message":{"role":"assistant","content":"Hello"},"finish_reason":"stop"}],"usage":{"prompt_tokens":5,"completion_tokens":3,"total_tokens":8}}`),
		}, nil
	})
	providerSvc := mocks.NewMockProviderService(ctrl)
	statsSvc := mocks.NewMockStatsService(ctrl)
	// Stats recording is best-effort for this test; allow any number of calls.
	statsSvc.EXPECT().Record(gomock.Any(), gomock.Any()).Return(nil).AnyTimes()
	h := newTestProxyHandler(engine, client, routingSvc, providerSvc, statsSvc)

	w := httptest.NewRecorder()
	c, _ := gin.CreateTestContext(w)
	c.Params = gin.Params{{Key: "protocol", Value: "openai"}, {Key: "path", Value: "/chat/completions"}}
	c.Request = httptest.NewRequest("POST", "/openai/chat/completions", bytes.NewReader([]byte(`{"model":"gpt-4","messages":[{"role":"user","content":"hi"}]}`)))

	h.HandleProxy(c)
	assert.Equal(t, 200, w.Code)

	// The relayed body must still carry the upstream response id.
	var resp map[string]any
	require.NoError(t, json.Unmarshal(w.Body.Bytes(), &resp))
	assert.Equal(t, "resp-1", resp["id"])
}
|
|
|
|
// TestProxyHandler_HandleProxy_RoutingError_WithBody verifies that when
// routing fails with ErrModelNotFound for a request carrying a JSON body,
// the handler responds 404.
func TestProxyHandler_HandleProxy_RoutingError_WithBody(t *testing.T) {
	ctrl := gomock.NewController(t)
	defer ctrl.Finish()

	engine := setupProxyEngine(t)
	routingSvc := mocks.NewMockRoutingService(ctrl)
	routingSvc.EXPECT().RouteByModelName("", "").Return(nil, appErrors.ErrModelNotFound)
	providerSvc := mocks.NewMockProviderService(ctrl)
	// NOTE(review): this test stubs List() while the passthrough tests stub
	// ListEnabledModels() — confirm which method the 404 path actually calls.
	providerSvc.EXPECT().List().Return(nil, nil)
	client := mocks.NewMockProviderClient(ctrl)
	statsSvc := mocks.NewMockStatsService(ctrl)
	h := newTestProxyHandler(engine, client, routingSvc, providerSvc, statsSvc)

	w := httptest.NewRecorder()
	c, _ := gin.CreateTestContext(w)
	c.Params = gin.Params{{Key: "protocol", Value: "openai"}, {Key: "path", Value: "/chat/completions"}}
	c.Request = httptest.NewRequest("POST", "/openai/chat/completions", bytes.NewReader([]byte(`{"model":"unknown","messages":[{"role":"user","content":"hi"}]}`)))

	h.HandleProxy(c)
	assert.Equal(t, 404, w.Code)
}
|
|
|
|
// TestProxyHandler_HandleProxy_ConversionError expects a 500 when the
// provider client fails.
//
// NOTE(review): despite the name, this test is byte-for-byte identical to
// TestProxyHandler_HandleProxy_ClientSendError below — Send returns
// context.DeadlineExceeded, so no conversion code path is exercised.
// Consider making it return malformed upstream JSON instead (see
// TestProxyHandler_HandleNonStream_ConvertResponseError) or removing it.
func TestProxyHandler_HandleProxy_ConversionError(t *testing.T) {
	ctrl := gomock.NewController(t)
	defer ctrl.Finish()

	engine := setupProxyEngine(t)
	routingSvc := mocks.NewMockRoutingService(ctrl)
	routingSvc.EXPECT().RouteByModelName("", "").Return(&domain.RouteResult{
		Provider: &domain.Provider{ID: "p1", Name: "Test", APIKey: "sk-test", BaseURL: "https://api.test.com", Protocol: "openai", Enabled: true},
		Model:    &domain.Model{ID: "m1", ProviderID: "p1", ModelName: "gpt-4", Enabled: true},
	}, nil)
	client := mocks.NewMockProviderClient(ctrl)
	client.EXPECT().Send(gomock.Any(), gomock.Any()).DoAndReturn(func(ctx context.Context, spec conversion.HTTPRequestSpec) (*conversion.HTTPResponseSpec, error) {
		return nil, context.DeadlineExceeded
	})
	providerSvc := mocks.NewMockProviderService(ctrl)
	statsSvc := mocks.NewMockStatsService(ctrl)
	h := newTestProxyHandler(engine, client, routingSvc, providerSvc, statsSvc)

	w := httptest.NewRecorder()
	c, _ := gin.CreateTestContext(w)
	c.Params = gin.Params{{Key: "protocol", Value: "openai"}, {Key: "path", Value: "/chat/completions"}}
	c.Request = httptest.NewRequest("POST", "/openai/chat/completions", bytes.NewReader([]byte(`{"model":"gpt-4","messages":[{"role":"user","content":"hi"}]}`)))

	h.HandleProxy(c)
	assert.Equal(t, 500, w.Code)
}
|
|
|
|
// TestProxyHandler_HandleProxy_ClientSendError verifies that a transport
// failure from the provider client (here simulated with
// context.DeadlineExceeded) surfaces to the caller as HTTP 500.
func TestProxyHandler_HandleProxy_ClientSendError(t *testing.T) {
	ctrl := gomock.NewController(t)
	defer ctrl.Finish()

	engine := setupProxyEngine(t)
	routingSvc := mocks.NewMockRoutingService(ctrl)
	routingSvc.EXPECT().RouteByModelName("", "").Return(&domain.RouteResult{
		Provider: &domain.Provider{ID: "p1", Name: "Test", APIKey: "sk-test", BaseURL: "https://api.test.com", Protocol: "openai", Enabled: true},
		Model:    &domain.Model{ID: "m1", ProviderID: "p1", ModelName: "gpt-4", Enabled: true},
	}, nil)
	client := mocks.NewMockProviderClient(ctrl)
	client.EXPECT().Send(gomock.Any(), gomock.Any()).DoAndReturn(func(ctx context.Context, spec conversion.HTTPRequestSpec) (*conversion.HTTPResponseSpec, error) {
		// Simulate an upstream timeout before any response arrives.
		return nil, context.DeadlineExceeded
	})
	providerSvc := mocks.NewMockProviderService(ctrl)
	statsSvc := mocks.NewMockStatsService(ctrl)
	h := newTestProxyHandler(engine, client, routingSvc, providerSvc, statsSvc)

	w := httptest.NewRecorder()
	c, _ := gin.CreateTestContext(w)
	c.Params = gin.Params{{Key: "protocol", Value: "openai"}, {Key: "path", Value: "/chat/completions"}}
	c.Request = httptest.NewRequest("POST", "/openai/chat/completions", bytes.NewReader([]byte(`{"model":"gpt-4","messages":[{"role":"user","content":"hi"}]}`)))

	h.HandleProxy(c)
	assert.Equal(t, 500, w.Code)
}
|
|
|
|
// TestProxyHandler_HandleProxy_StreamSuccess covers the streaming happy
// path: a request with "stream":true is served over SSE, and the SSE body
// relayed to the client contains the streamed content chunks.
func TestProxyHandler_HandleProxy_StreamSuccess(t *testing.T) {
	ctrl := gomock.NewController(t)
	defer ctrl.Finish()

	engine := setupProxyEngine(t)
	routingSvc := mocks.NewMockRoutingService(ctrl)
	routingSvc.EXPECT().RouteByModelName("", "").Return(&domain.RouteResult{
		Provider: &domain.Provider{ID: "p1", Name: "Test", APIKey: "sk-test", BaseURL: "https://api.test.com", Protocol: "openai", Enabled: true},
		Model:    &domain.Model{ID: "m1", ProviderID: "p1", ModelName: "gpt-4", Enabled: true},
	}, nil)
	client := mocks.NewMockProviderClient(ctrl)
	client.EXPECT().SendStream(gomock.Any(), gomock.Any()).DoAndReturn(func(ctx context.Context, spec conversion.HTTPRequestSpec) (<-chan provider.StreamEvent, error) {
		// Emit a role chunk, a content chunk, the [DONE] sentinel, then a
		// Done event; the sender goroutine closes the channel when finished.
		ch := make(chan provider.StreamEvent, 10)
		go func() {
			defer close(ch)
			ch <- provider.StreamEvent{Data: []byte("data: {\"id\":\"1\",\"object\":\"chat.completion.chunk\",\"choices\":[{\"index\":0,\"delta\":{\"role\":\"assistant\"}}]}\n\n")}
			ch <- provider.StreamEvent{Data: []byte("data: {\"id\":\"1\",\"object\":\"chat.completion.chunk\",\"choices\":[{\"index\":0,\"delta\":{\"content\":\"Hello\"}}]}\n\n")}
			ch <- provider.StreamEvent{Data: []byte("data: [DONE]\n\n")}
			ch <- provider.StreamEvent{Done: true}
		}()
		return ch, nil
	})
	providerSvc := mocks.NewMockProviderService(ctrl)
	statsSvc := mocks.NewMockStatsService(ctrl)
	statsSvc.EXPECT().Record(gomock.Any(), gomock.Any()).Return(nil).AnyTimes()
	h := newTestProxyHandler(engine, client, routingSvc, providerSvc, statsSvc)

	w := httptest.NewRecorder()
	c, _ := gin.CreateTestContext(w)
	c.Params = gin.Params{{Key: "protocol", Value: "openai"}, {Key: "path", Value: "/chat/completions"}}
	c.Request = httptest.NewRequest("POST", "/openai/chat/completions", bytes.NewReader([]byte(`{"model":"gpt-4","messages":[{"role":"user","content":"hi"}],"stream":true}`)))

	h.HandleProxy(c)
	assert.Equal(t, 200, w.Code)
	assert.Equal(t, "text/event-stream", w.Header().Get("Content-Type"))
	assert.Contains(t, w.Body.String(), "Hello")
}
|
|
|
|
// TestProxyHandler_HandleProxy_StreamError verifies that when SendStream
// fails before any event is produced, the handler responds 500 rather
// than starting an SSE stream.
func TestProxyHandler_HandleProxy_StreamError(t *testing.T) {
	ctrl := gomock.NewController(t)
	defer ctrl.Finish()

	engine := setupProxyEngine(t)
	routingSvc := mocks.NewMockRoutingService(ctrl)
	routingSvc.EXPECT().RouteByModelName("", "").Return(&domain.RouteResult{
		Provider: &domain.Provider{ID: "p1", Name: "Test", APIKey: "sk-test", BaseURL: "https://api.test.com", Protocol: "openai", Enabled: true},
		Model:    &domain.Model{ID: "m1", ProviderID: "p1", ModelName: "gpt-4", Enabled: true},
	}, nil)
	client := mocks.NewMockProviderClient(ctrl)
	client.EXPECT().SendStream(gomock.Any(), gomock.Any()).DoAndReturn(func(ctx context.Context, spec conversion.HTTPRequestSpec) (<-chan provider.StreamEvent, error) {
		// Fail before the stream is established.
		return nil, context.DeadlineExceeded
	})
	providerSvc := mocks.NewMockProviderService(ctrl)
	statsSvc := mocks.NewMockStatsService(ctrl)
	h := newTestProxyHandler(engine, client, routingSvc, providerSvc, statsSvc)

	w := httptest.NewRecorder()
	c, _ := gin.CreateTestContext(w)
	c.Params = gin.Params{{Key: "protocol", Value: "openai"}, {Key: "path", Value: "/chat/completions"}}
	c.Request = httptest.NewRequest("POST", "/openai/chat/completions", bytes.NewReader([]byte(`{"model":"gpt-4","messages":[{"role":"user","content":"hi"}],"stream":true}`)))

	h.HandleProxy(c)
	assert.Equal(t, 500, w.Code)
}
|
|
|
|
// TestProxyHandler_ForwardPassthrough_GET exercises the passthrough path
// for a bodiless GET (/models): routing cannot resolve a model, so the
// handler falls back to listing enabled models and answers 200 itself.
func TestProxyHandler_ForwardPassthrough_GET(t *testing.T) {
	ctrl := gomock.NewController(t)
	defer ctrl.Finish()

	engine := setupProxyEngine(t)
	routingSvc := mocks.NewMockRoutingService(ctrl)
	routingSvc.EXPECT().RouteByModelName(gomock.Any(), gomock.Any()).Return(nil, appErrors.ErrModelNotFound).AnyTimes()
	providerSvc := mocks.NewMockProviderService(ctrl)
	providerSvc.EXPECT().ListEnabledModels().Return([]domain.Model{
		{ID: "m1", ProviderID: "p1", ModelName: "gpt-4", Enabled: true},
	}, nil)
	client := mocks.NewMockProviderClient(ctrl)
	statsSvc := mocks.NewMockStatsService(ctrl)
	h := newTestProxyHandler(engine, client, routingSvc, providerSvc, statsSvc)

	w := httptest.NewRecorder()
	c, _ := gin.CreateTestContext(w)
	c.Params = gin.Params{{Key: "protocol", Value: "openai"}, {Key: "path", Value: "/models"}}
	c.Request = httptest.NewRequest("GET", "/openai/models", nil)

	h.HandleProxy(c)
	assert.Equal(t, 200, w.Code)
}
|
|
|
|
// TestProxyHandler_ForwardPassthrough_UnsupportedProtocol verifies that a
// protocol path parameter with no registered adapter ("unknown") yields
// HTTP 400.
func TestProxyHandler_ForwardPassthrough_UnsupportedProtocol(t *testing.T) {
	ctrl := gomock.NewController(t)
	defer ctrl.Finish()

	engine := setupProxyEngine(t)
	routingSvc := mocks.NewMockRoutingService(ctrl)
	routingSvc.EXPECT().RouteByModelName(gomock.Any(), gomock.Any()).Return(nil, appErrors.ErrModelNotFound).AnyTimes()
	providerSvc := mocks.NewMockProviderService(ctrl)
	client := mocks.NewMockProviderClient(ctrl)
	statsSvc := mocks.NewMockStatsService(ctrl)
	h := newTestProxyHandler(engine, client, routingSvc, providerSvc, statsSvc)

	w := httptest.NewRecorder()
	c, _ := gin.CreateTestContext(w)
	c.Params = gin.Params{{Key: "protocol", Value: "unknown"}, {Key: "path", Value: "/models"}}
	c.Request = httptest.NewRequest("GET", "/unknown/models", nil)

	h.HandleProxy(c)
	assert.Equal(t, 400, w.Code)
}
|
|
|
|
// TestProxyHandler_ForwardPassthrough_NoProviders exercises the
// passthrough /models path when no models are enabled.
// NOTE(review): a 200 with an empty model list is asserted here —
// presumably the endpoint returns an empty collection rather than an
// error; confirm against the handler's passthrough implementation.
func TestProxyHandler_ForwardPassthrough_NoProviders(t *testing.T) {
	ctrl := gomock.NewController(t)
	defer ctrl.Finish()

	engine := setupProxyEngine(t)
	routingSvc := mocks.NewMockRoutingService(ctrl)
	routingSvc.EXPECT().RouteByModelName(gomock.Any(), gomock.Any()).Return(nil, appErrors.ErrModelNotFound).AnyTimes()
	providerSvc := mocks.NewMockProviderService(ctrl)
	providerSvc.EXPECT().ListEnabledModels().Return([]domain.Model{}, nil)
	client := mocks.NewMockProviderClient(ctrl)
	statsSvc := mocks.NewMockStatsService(ctrl)
	h := newTestProxyHandler(engine, client, routingSvc, providerSvc, statsSvc)

	w := httptest.NewRecorder()
	c, _ := gin.CreateTestContext(w)
	c.Params = gin.Params{{Key: "protocol", Value: "openai"}, {Key: "path", Value: "/models"}}
	c.Request = httptest.NewRequest("GET", "/openai/models", nil)

	h.HandleProxy(c)
	assert.Equal(t, 200, w.Code)
}
|
|
|
|
func TestExtractHeaders(t *testing.T) {
|
|
w := httptest.NewRecorder()
|
|
c, _ := gin.CreateTestContext(w)
|
|
c.Request = httptest.NewRequest("POST", "/", nil)
|
|
c.Request.Header.Set("Authorization", "Bearer test")
|
|
c.Request.Header.Set("Content-Type", "application/json")
|
|
|
|
headers := extractHeaders(c)
|
|
assert.Equal(t, "Bearer test", headers["Authorization"])
|
|
assert.Equal(t, "application/json", headers["Content-Type"])
|
|
}
|
|
|
|
// TestProxyHandler_HandleProxy_ProviderProtocolDefault verifies that a
// provider whose Protocol field is empty still serves a request
// successfully — presumably the handler falls back to a default protocol;
// confirm the fallback value against the handler implementation.
func TestProxyHandler_HandleProxy_ProviderProtocolDefault(t *testing.T) {
	ctrl := gomock.NewController(t)
	defer ctrl.Finish()

	engine := setupProxyEngine(t)
	routingSvc := mocks.NewMockRoutingService(ctrl)
	routingSvc.EXPECT().RouteByModelName("", "").Return(&domain.RouteResult{
		// Protocol deliberately left empty to exercise the default path.
		Provider: &domain.Provider{ID: "p1", Name: "Test", APIKey: "sk-test", BaseURL: "https://api.test.com", Protocol: "", Enabled: true},
		Model:    &domain.Model{ID: "m1", ProviderID: "p1", ModelName: "gpt-4", Enabled: true},
	}, nil)
	client := mocks.NewMockProviderClient(ctrl)
	client.EXPECT().Send(gomock.Any(), gomock.Any()).DoAndReturn(func(ctx context.Context, spec conversion.HTTPRequestSpec) (*conversion.HTTPResponseSpec, error) {
		return &conversion.HTTPResponseSpec{
			StatusCode: 200,
			Body:       []byte(`{"id":"r1","object":"chat.completion","model":"gpt-4","choices":[{"index":0,"message":{"role":"assistant","content":"ok"},"finish_reason":"stop"}],"usage":{"prompt_tokens":1,"completion_tokens":1,"total_tokens":2}}`),
		}, nil
	})
	providerSvc := mocks.NewMockProviderService(ctrl)
	statsSvc := mocks.NewMockStatsService(ctrl)
	statsSvc.EXPECT().Record(gomock.Any(), gomock.Any()).Return(nil).AnyTimes()
	h := newTestProxyHandler(engine, client, routingSvc, providerSvc, statsSvc)

	w := httptest.NewRecorder()
	c, _ := gin.CreateTestContext(w)
	c.Params = gin.Params{{Key: "protocol", Value: "openai"}, {Key: "path", Value: "/chat/completions"}}
	c.Request = httptest.NewRequest("POST", "/openai/chat/completions", bytes.NewReader([]byte(`{"model":"gpt-4","messages":[{"role":"user","content":"hi"}]}`)))

	h.HandleProxy(c)
	assert.Equal(t, 200, w.Code)
}
|
|
|
|
func TestProxyHandler_WriteConversionError_NonConversionError(t *testing.T) {
|
|
ctrl := gomock.NewController(t)
|
|
defer ctrl.Finish()
|
|
|
|
engine := setupProxyEngine(t)
|
|
client := mocks.NewMockProviderClient(ctrl)
|
|
routingSvc := mocks.NewMockRoutingService(ctrl)
|
|
providerSvc := mocks.NewMockProviderService(ctrl)
|
|
statsSvc := mocks.NewMockStatsService(ctrl)
|
|
h := newTestProxyHandler(engine, client, routingSvc, providerSvc, statsSvc)
|
|
|
|
w := httptest.NewRecorder()
|
|
c, _ := gin.CreateTestContext(w)
|
|
c.Request = httptest.NewRequest("POST", "/", nil)
|
|
|
|
h.writeConversionError(c, context.DeadlineExceeded, "openai")
|
|
assert.Equal(t, 500, w.Code)
|
|
}
|
|
|
|
// TestProxyHandler_WriteConversionError_ConversionError verifies the
// status written for a typed conversion error.
// NOTE(review): ErrorCodeInvalidInput mapping to 500 (not 400) looks
// surprising — confirm the intended code-to-status mapping in
// writeConversionError before relying on this assertion.
func TestProxyHandler_WriteConversionError_ConversionError(t *testing.T) {
	ctrl := gomock.NewController(t)
	defer ctrl.Finish()

	engine := setupProxyEngine(t)
	client := mocks.NewMockProviderClient(ctrl)
	routingSvc := mocks.NewMockRoutingService(ctrl)
	providerSvc := mocks.NewMockProviderService(ctrl)
	statsSvc := mocks.NewMockStatsService(ctrl)
	h := newTestProxyHandler(engine, client, routingSvc, providerSvc, statsSvc)

	w := httptest.NewRecorder()
	c, _ := gin.CreateTestContext(w)
	c.Request = httptest.NewRequest("POST", "/", nil)

	convErr := conversion.NewConversionError(conversion.ErrorCodeInvalidInput, "bad request")
	h.writeConversionError(c, convErr, "openai")
	assert.Equal(t, 500, w.Code)
}
|
|
|
|
// TestProxyHandler_HandleProxy_EmptyBody verifies that a bodiless GET is
// served via the passthrough path with HTTP 200.
// NOTE(review): this test is identical to
// TestProxyHandler_ForwardPassthrough_GET — consider deduplicating.
func TestProxyHandler_HandleProxy_EmptyBody(t *testing.T) {
	ctrl := gomock.NewController(t)
	defer ctrl.Finish()

	engine := setupProxyEngine(t)
	routingSvc := mocks.NewMockRoutingService(ctrl)
	routingSvc.EXPECT().RouteByModelName(gomock.Any(), gomock.Any()).Return(nil, appErrors.ErrModelNotFound).AnyTimes()
	providerSvc := mocks.NewMockProviderService(ctrl)
	providerSvc.EXPECT().ListEnabledModels().Return([]domain.Model{
		{ID: "m1", ProviderID: "p1", ModelName: "gpt-4", Enabled: true},
	}, nil)
	client := mocks.NewMockProviderClient(ctrl)
	statsSvc := mocks.NewMockStatsService(ctrl)
	h := newTestProxyHandler(engine, client, routingSvc, providerSvc, statsSvc)

	w := httptest.NewRecorder()
	c, _ := gin.CreateTestContext(w)
	c.Params = gin.Params{{Key: "protocol", Value: "openai"}, {Key: "path", Value: "/models"}}
	c.Request = httptest.NewRequest("GET", "/openai/models", nil)

	h.HandleProxy(c)
	assert.Equal(t, 200, w.Code)
}
|
|
|
|
// TestProxyHandler_HandleStream_MidStreamError verifies behavior when the
// upstream stream fails after some data has already been sent: SSE
// headers and the earlier chunks have been flushed, so the client still
// sees a 200 with the partial content.
func TestProxyHandler_HandleStream_MidStreamError(t *testing.T) {
	ctrl := gomock.NewController(t)
	defer ctrl.Finish()

	engine := setupProxyEngine(t)
	routingSvc := mocks.NewMockRoutingService(ctrl)
	routingSvc.EXPECT().RouteByModelName("", "").Return(&domain.RouteResult{
		Provider: &domain.Provider{ID: "p1", Name: "Test", APIKey: "sk-test", BaseURL: "https://api.test.com", Protocol: "openai", Enabled: true},
		Model:    &domain.Model{ID: "m1", ProviderID: "p1", ModelName: "gpt-4", Enabled: true},
	}, nil)
	client := mocks.NewMockProviderClient(ctrl)
	client.EXPECT().SendStream(gomock.Any(), gomock.Any()).DoAndReturn(func(ctx context.Context, spec conversion.HTTPRequestSpec) (<-chan provider.StreamEvent, error) {
		ch := make(chan provider.StreamEvent, 10)
		go func() {
			defer close(ch)
			// One good chunk, then a mid-stream transport error.
			ch <- provider.StreamEvent{Data: []byte("data: {\"id\":\"1\",\"object\":\"chat.completion.chunk\",\"choices\":[{\"index\":0,\"delta\":{\"content\":\"Hello\"}}]}\n\n")}
			ch <- provider.StreamEvent{Error: fmt.Errorf("connection reset by peer")}
		}()
		return ch, nil
	})
	providerSvc := mocks.NewMockProviderService(ctrl)
	statsSvc := mocks.NewMockStatsService(ctrl)
	statsSvc.EXPECT().Record(gomock.Any(), gomock.Any()).Return(nil).AnyTimes()
	h := newTestProxyHandler(engine, client, routingSvc, providerSvc, statsSvc)

	w := httptest.NewRecorder()
	c, _ := gin.CreateTestContext(w)
	c.Params = gin.Params{{Key: "protocol", Value: "openai"}, {Key: "path", Value: "/chat/completions"}}
	c.Request = httptest.NewRequest("POST", "/openai/chat/completions", bytes.NewReader([]byte(`{"model":"gpt-4","messages":[{"role":"user","content":"hi"}],"stream":true}`)))

	h.HandleProxy(c)
	// Status and SSE headers were committed before the error occurred.
	assert.Equal(t, 200, w.Code)
	assert.Equal(t, "text/event-stream", w.Header().Get("Content-Type"))
	body := w.Body.String()
	assert.Contains(t, body, "Hello")
}
|
|
|
|
// TestProxyHandler_HandleStream_FlushOutput verifies the full set of SSE
// response headers (Content-Type, Cache-Control, Connection) and that
// both the content chunk and the [DONE] sentinel are flushed to the body.
func TestProxyHandler_HandleStream_FlushOutput(t *testing.T) {
	ctrl := gomock.NewController(t)
	defer ctrl.Finish()

	engine := setupProxyEngine(t)
	routingSvc := mocks.NewMockRoutingService(ctrl)
	routingSvc.EXPECT().RouteByModelName("", "").Return(&domain.RouteResult{
		Provider: &domain.Provider{ID: "p1", Name: "Test", APIKey: "sk-test", BaseURL: "https://api.test.com", Protocol: "openai", Enabled: true},
		Model:    &domain.Model{ID: "m1", ProviderID: "p1", ModelName: "gpt-4", Enabled: true},
	}, nil)
	client := mocks.NewMockProviderClient(ctrl)
	client.EXPECT().SendStream(gomock.Any(), gomock.Any()).DoAndReturn(func(ctx context.Context, spec conversion.HTTPRequestSpec) (<-chan provider.StreamEvent, error) {
		ch := make(chan provider.StreamEvent, 10)
		go func() {
			defer close(ch)
			ch <- provider.StreamEvent{Data: []byte("data: {\"id\":\"1\",\"object\":\"chat.completion.chunk\",\"choices\":[{\"index\":0,\"delta\":{\"content\":\"Hi\"}}]}\n\n")}
			ch <- provider.StreamEvent{Data: []byte("data: [DONE]\n\n")}
			ch <- provider.StreamEvent{Done: true}
		}()
		return ch, nil
	})
	providerSvc := mocks.NewMockProviderService(ctrl)
	statsSvc := mocks.NewMockStatsService(ctrl)
	statsSvc.EXPECT().Record(gomock.Any(), gomock.Any()).Return(nil).AnyTimes()
	h := newTestProxyHandler(engine, client, routingSvc, providerSvc, statsSvc)

	w := httptest.NewRecorder()
	c, _ := gin.CreateTestContext(w)
	c.Params = gin.Params{{Key: "protocol", Value: "openai"}, {Key: "path", Value: "/chat/completions"}}
	c.Request = httptest.NewRequest("POST", "/openai/chat/completions", bytes.NewReader([]byte(`{"model":"gpt-4","messages":[{"role":"user","content":"hi"}],"stream":true}`)))

	h.HandleProxy(c)
	assert.Equal(t, 200, w.Code)
	// Standard SSE headers for a streamed proxy response.
	assert.Equal(t, "text/event-stream", w.Header().Get("Content-Type"))
	assert.Equal(t, "no-cache", w.Header().Get("Cache-Control"))
	assert.Equal(t, "keep-alive", w.Header().Get("Connection"))
	body := w.Body.String()
	assert.Contains(t, body, "Hi")
	assert.Contains(t, body, "[DONE]")
}
|
|
|
|
// TestProxyHandler_HandleStream_CreateStreamConverterError verifies that
// a streaming request routed to a provider with an unregistered protocol
// ("nonexistent" — only the openai adapter is registered here) fails with
// HTTP 500 before any stream is opened.
func TestProxyHandler_HandleStream_CreateStreamConverterError(t *testing.T) {
	ctrl := gomock.NewController(t)
	defer ctrl.Finish()

	// Register only the openai adapter so the "nonexistent" protocol below
	// has no converter.
	registry := conversion.NewMemoryRegistry()
	engine := conversion.NewConversionEngine(registry, nil)
	err := registry.Register(openai.NewAdapter())
	require.NoError(t, err)

	routingSvc := mocks.NewMockRoutingService(ctrl)
	routingSvc.EXPECT().RouteByModelName("", "").Return(&domain.RouteResult{
		Provider: &domain.Provider{ID: "p1", Name: "Test", APIKey: "sk-test", BaseURL: "https://api.test.com", Protocol: "nonexistent", Enabled: true},
		Model:    &domain.Model{ID: "m1", ProviderID: "p1", ModelName: "gpt-4", Enabled: true},
	}, nil)
	client := mocks.NewMockProviderClient(ctrl)
	providerSvc := mocks.NewMockProviderService(ctrl)
	statsSvc := mocks.NewMockStatsService(ctrl)
	h := newTestProxyHandler(engine, client, routingSvc, providerSvc, statsSvc)

	w := httptest.NewRecorder()
	c, _ := gin.CreateTestContext(w)
	c.Params = gin.Params{{Key: "protocol", Value: "openai"}, {Key: "path", Value: "/chat/completions"}}
	c.Request = httptest.NewRequest("POST", "/openai/chat/completions", bytes.NewReader([]byte(`{"model":"gpt-4","messages":[{"role":"user","content":"hi"}],"stream":true}`)))

	h.HandleProxy(c)
	assert.Equal(t, 500, w.Code)
}
|
|
|
|
// TestProxyHandler_HandleStream_ConvertRequestError expects HTTP 500 when
// the streaming request cannot be converted for the target protocol.
// NOTE(review): the setup is identical to
// TestProxyHandler_HandleStream_CreateStreamConverterError (unregistered
// "nonexistent" protocol), so both tests exercise the same failure point;
// consider differentiating or merging them.
func TestProxyHandler_HandleStream_ConvertRequestError(t *testing.T) {
	ctrl := gomock.NewController(t)
	defer ctrl.Finish()

	registry := conversion.NewMemoryRegistry()
	engine := conversion.NewConversionEngine(registry, nil)
	require.NoError(t, registry.Register(openai.NewAdapter()))

	routingSvc := mocks.NewMockRoutingService(ctrl)
	routingSvc.EXPECT().RouteByModelName("", "").Return(&domain.RouteResult{
		Provider: &domain.Provider{ID: "p1", Name: "Test", APIKey: "sk-test", BaseURL: "https://api.test.com", Protocol: "nonexistent", Enabled: true},
		Model:    &domain.Model{ID: "m1", ProviderID: "p1", ModelName: "gpt-4", Enabled: true},
	}, nil)
	client := mocks.NewMockProviderClient(ctrl)
	providerSvc := mocks.NewMockProviderService(ctrl)
	statsSvc := mocks.NewMockStatsService(ctrl)
	h := newTestProxyHandler(engine, client, routingSvc, providerSvc, statsSvc)

	w := httptest.NewRecorder()
	c, _ := gin.CreateTestContext(w)
	c.Params = gin.Params{{Key: "protocol", Value: "openai"}, {Key: "path", Value: "/chat/completions"}}
	c.Request = httptest.NewRequest("POST", "/openai/chat/completions", bytes.NewReader([]byte(`{"model":"gpt-4","messages":[{"role":"user","content":"hi"}],"stream":true}`)))

	h.HandleProxy(c)
	assert.Equal(t, 500, w.Code)
}
|
|
|
|
// TestProxyHandler_HandleNonStream_ConvertResponseError verifies that a
// syntactically invalid upstream body (cross-protocol anthropic→openai
// conversion of `invalid json`) is reported to the caller as HTTP 500.
func TestProxyHandler_HandleNonStream_ConvertResponseError(t *testing.T) {
	ctrl := gomock.NewController(t)
	defer ctrl.Finish()

	registry := conversion.NewMemoryRegistry()
	engine := conversion.NewConversionEngine(registry, nil)
	require.NoError(t, registry.Register(openai.NewAdapter()))
	require.NoError(t, registry.Register(anthropic.NewAdapter()))

	routingSvc := mocks.NewMockRoutingService(ctrl)
	// Route to an anthropic provider while the client speaks openai, forcing
	// a response conversion that will fail on the malformed body below.
	routingSvc.EXPECT().RouteByModelName("", "").Return(&domain.RouteResult{
		Provider: &domain.Provider{ID: "p1", Name: "Test", APIKey: "sk-test", BaseURL: "https://api.test.com", Protocol: "anthropic", Enabled: true},
		Model:    &domain.Model{ID: "m1", ProviderID: "p1", ModelName: "claude-3", Enabled: true},
	}, nil)
	client := mocks.NewMockProviderClient(ctrl)
	client.EXPECT().Send(gomock.Any(), gomock.Any()).DoAndReturn(func(ctx context.Context, spec conversion.HTTPRequestSpec) (*conversion.HTTPResponseSpec, error) {
		return &conversion.HTTPResponseSpec{
			StatusCode: 200,
			Headers:    map[string]string{"Content-Type": "application/json"},
			Body:       []byte(`invalid json`),
		}, nil
	})
	providerSvc := mocks.NewMockProviderService(ctrl)
	statsSvc := mocks.NewMockStatsService(ctrl)
	h := newTestProxyHandler(engine, client, routingSvc, providerSvc, statsSvc)

	w := httptest.NewRecorder()
	c, _ := gin.CreateTestContext(w)
	c.Params = gin.Params{{Key: "protocol", Value: "openai"}, {Key: "path", Value: "/chat/completions"}}
	c.Request = httptest.NewRequest("POST", "/openai/chat/completions", bytes.NewReader([]byte(`{"model":"claude-3","messages":[{"role":"user","content":"hi"}]}`)))

	h.HandleProxy(c)
	assert.Equal(t, 500, w.Code)
}
|
|
|
|
// TestProxyHandler_HandleNonStream_ResponseHeaders verifies that upstream
// response headers (including custom ones such as X-Custom) are copied
// onto the proxied response.
func TestProxyHandler_HandleNonStream_ResponseHeaders(t *testing.T) {
	ctrl := gomock.NewController(t)
	defer ctrl.Finish()

	engine := setupProxyEngine(t)
	routingSvc := mocks.NewMockRoutingService(ctrl)
	routingSvc.EXPECT().RouteByModelName("", "").Return(&domain.RouteResult{
		Provider: &domain.Provider{ID: "p1", Name: "Test", APIKey: "sk-test", BaseURL: "https://api.test.com", Protocol: "openai", Enabled: true},
		Model:    &domain.Model{ID: "m1", ProviderID: "p1", ModelName: "gpt-4", Enabled: true},
	}, nil)
	client := mocks.NewMockProviderClient(ctrl)
	client.EXPECT().Send(gomock.Any(), gomock.Any()).DoAndReturn(func(ctx context.Context, spec conversion.HTTPRequestSpec) (*conversion.HTTPResponseSpec, error) {
		return &conversion.HTTPResponseSpec{
			StatusCode: 200,
			// X-Custom is the header under test; it must survive the proxy.
			Headers: map[string]string{"Content-Type": "application/json", "X-Custom": "test-value"},
			Body:    []byte(`{"id":"r1","object":"chat.completion","model":"gpt-4","choices":[{"index":0,"message":{"role":"assistant","content":"ok"},"finish_reason":"stop"}],"usage":{"prompt_tokens":1,"completion_tokens":1,"total_tokens":2}}`),
		}, nil
	})
	providerSvc := mocks.NewMockProviderService(ctrl)
	statsSvc := mocks.NewMockStatsService(ctrl)
	statsSvc.EXPECT().Record(gomock.Any(), gomock.Any()).Return(nil).AnyTimes()
	h := newTestProxyHandler(engine, client, routingSvc, providerSvc, statsSvc)

	w := httptest.NewRecorder()
	c, _ := gin.CreateTestContext(w)
	c.Params = gin.Params{{Key: "protocol", Value: "openai"}, {Key: "path", Value: "/chat/completions"}}
	c.Request = httptest.NewRequest("POST", "/openai/chat/completions", bytes.NewReader([]byte(`{"model":"gpt-4","messages":[{"role":"user","content":"hi"}]}`)))

	h.HandleProxy(c)
	assert.Equal(t, 200, w.Code)
	assert.Equal(t, "test-value", w.Header().Get("X-Custom"))
	assert.Equal(t, "application/json", w.Header().Get("Content-Type"))
}
|
|
|
|
// TestProxyHandler_ForwardPassthrough_CrossProtocol exercises the /models
// passthrough with both adapters registered.
// NOTE(review): the anthropic adapter is registered but nothing here
// forces a cross-protocol interaction — the assertions match the plain
// passthrough tests; confirm what "cross protocol" was intended to cover.
func TestProxyHandler_ForwardPassthrough_CrossProtocol(t *testing.T) {
	ctrl := gomock.NewController(t)
	defer ctrl.Finish()

	registry := conversion.NewMemoryRegistry()
	engine := conversion.NewConversionEngine(registry, nil)
	require.NoError(t, registry.Register(openai.NewAdapter()))

	anthropicAdapter := anthropic.NewAdapter()
	require.NoError(t, registry.Register(anthropicAdapter))

	routingSvc := mocks.NewMockRoutingService(ctrl)
	routingSvc.EXPECT().RouteByModelName(gomock.Any(), gomock.Any()).Return(nil, appErrors.ErrModelNotFound).AnyTimes()
	providerSvc := mocks.NewMockProviderService(ctrl)
	providerSvc.EXPECT().ListEnabledModels().Return([]domain.Model{
		{ID: "m1", ProviderID: "p1", ModelName: "gpt-4", Enabled: true},
	}, nil)
	client := mocks.NewMockProviderClient(ctrl)
	statsSvc := mocks.NewMockStatsService(ctrl)
	h := newTestProxyHandler(engine, client, routingSvc, providerSvc, statsSvc)

	w := httptest.NewRecorder()
	c, _ := gin.CreateTestContext(w)
	c.Params = gin.Params{{Key: "protocol", Value: "openai"}, {Key: "path", Value: "/models"}}
	c.Request = httptest.NewRequest("GET", "/openai/models", nil)

	h.HandleProxy(c)
	assert.Equal(t, 200, w.Code)
}
|
|
|
|
// TestProxyHandler_ForwardPassthrough_NoBody_NoModel verifies the
// passthrough path for a GET with neither a body nor a model reference.
// NOTE(review): identical to TestProxyHandler_ForwardPassthrough_GET and
// TestProxyHandler_HandleProxy_EmptyBody — consider deduplicating.
func TestProxyHandler_ForwardPassthrough_NoBody_NoModel(t *testing.T) {
	ctrl := gomock.NewController(t)
	defer ctrl.Finish()

	engine := setupProxyEngine(t)
	routingSvc := mocks.NewMockRoutingService(ctrl)
	routingSvc.EXPECT().RouteByModelName(gomock.Any(), gomock.Any()).Return(nil, appErrors.ErrModelNotFound).AnyTimes()
	providerSvc := mocks.NewMockProviderService(ctrl)
	providerSvc.EXPECT().ListEnabledModels().Return([]domain.Model{
		{ID: "m1", ProviderID: "p1", ModelName: "gpt-4", Enabled: true},
	}, nil)
	client := mocks.NewMockProviderClient(ctrl)
	statsSvc := mocks.NewMockStatsService(ctrl)
	h := newTestProxyHandler(engine, client, routingSvc, providerSvc, statsSvc)

	w := httptest.NewRecorder()
	c, _ := gin.CreateTestContext(w)
	c.Params = gin.Params{{Key: "protocol", Value: "openai"}, {Key: "path", Value: "/models"}}
	c.Request = httptest.NewRequest("GET", "/openai/models", nil)

	h.HandleProxy(c)
	assert.Equal(t, 200, w.Code)
}
|
|
|
|
func TestIsStreamRequest_EdgeCases(t *testing.T) {
|
|
ctrl := gomock.NewController(t)
|
|
defer ctrl.Finish()
|
|
|
|
engine := setupProxyEngine(t)
|
|
client := mocks.NewMockProviderClient(ctrl)
|
|
routingSvc := mocks.NewMockRoutingService(ctrl)
|
|
providerSvc := mocks.NewMockProviderService(ctrl)
|
|
statsSvc := mocks.NewMockStatsService(ctrl)
|
|
h := newTestProxyHandler(engine, client, routingSvc, providerSvc, statsSvc)
|
|
|
|
tests := []struct {
|
|
name string
|
|
body string
|
|
path string
|
|
expected bool
|
|
}{
|
|
{"stream at end of JSON", `{"messages":[],"stream":true}`, "/chat/completions", true},
|
|
{"stream with spaces", `{"stream" : true}`, "/chat/completions", true},
|
|
{"stream embedded in string value", `{"model":"stream:true"}`, "/chat/completions", false},
|
|
{"empty body", "", "/chat/completions", false},
|
|
{"stream true embeddings", `{"model":"text-emb","stream":true}`, "/v1/embeddings", false},
|
|
}
|
|
|
|
for _, tt := range tests {
|
|
t.Run(tt.name, func(t *testing.T) {
|
|
result := h.isStreamRequest([]byte(tt.body), "openai", tt.path)
|
|
assert.Equal(t, tt.expected, result)
|
|
})
|
|
}
|
|
}
|
|
|
|
func TestProxyHandler_WriteError_RouteError(t *testing.T) {
|
|
ctrl := gomock.NewController(t)
|
|
defer ctrl.Finish()
|
|
|
|
engine := setupProxyEngine(t)
|
|
client := mocks.NewMockProviderClient(ctrl)
|
|
routingSvc := mocks.NewMockRoutingService(ctrl)
|
|
providerSvc := mocks.NewMockProviderService(ctrl)
|
|
statsSvc := mocks.NewMockStatsService(ctrl)
|
|
h := newTestProxyHandler(engine, client, routingSvc, providerSvc, statsSvc)
|
|
|
|
w := httptest.NewRecorder()
|
|
c, _ := gin.CreateTestContext(w)
|
|
c.Request = httptest.NewRequest("POST", "/", nil)
|
|
|
|
h.writeError(c, fmt.Errorf("model not found"), "openai")
|
|
assert.Equal(t, 404, w.Code)
|
|
}
|
|
|
|
func TestProxyHandler_HandleProxy_RouteEmptyBody_NoModel(t *testing.T) {
|
|
ctrl := gomock.NewController(t)
|
|
defer ctrl.Finish()
|
|
|
|
engine := setupProxyEngine(t)
|
|
routingSvc := mocks.NewMockRoutingService(ctrl)
|
|
routingSvc.EXPECT().RouteByModelName(gomock.Any(), gomock.Any()).Return(nil, appErrors.ErrModelNotFound).AnyTimes()
|
|
providerSvc := mocks.NewMockProviderService(ctrl)
|
|
providerSvc.EXPECT().ListEnabledModels().Return([]domain.Model{
|
|
{ID: "m1", ProviderID: "p1", ModelName: "gpt-4", Enabled: true},
|
|
}, nil)
|
|
client := mocks.NewMockProviderClient(ctrl)
|
|
statsSvc := mocks.NewMockStatsService(ctrl)
|
|
h := newTestProxyHandler(engine, client, routingSvc, providerSvc, statsSvc)
|
|
|
|
w := httptest.NewRecorder()
|
|
c, _ := gin.CreateTestContext(w)
|
|
c.Params = gin.Params{{Key: "protocol", Value: "openai"}, {Key: "path", Value: "/models"}}
|
|
c.Request = httptest.NewRequest("GET", "/openai/models", nil)
|
|
|
|
h.HandleProxy(c)
|
|
assert.Equal(t, 200, w.Code)
|
|
}
|
|
|
|
// ============ isStreamRequest tests ============
|
|
|
|
func TestIsStreamRequest(t *testing.T) {
|
|
engine := setupProxyEngine(t)
|
|
h := &ProxyHandler{engine: engine}
|
|
|
|
tests := []struct {
|
|
name string
|
|
body []byte
|
|
clientProtocol string
|
|
nativePath string
|
|
expected bool
|
|
}{
|
|
{
|
|
name: "stream true",
|
|
body: []byte(`{"model": "gpt-4", "stream": true}`),
|
|
clientProtocol: "openai",
|
|
nativePath: "/chat/completions",
|
|
expected: true,
|
|
},
|
|
{
|
|
name: "stream false",
|
|
body: []byte(`{"model": "gpt-4", "stream": false}`),
|
|
clientProtocol: "openai",
|
|
nativePath: "/chat/completions",
|
|
expected: false,
|
|
},
|
|
{
|
|
name: "no stream field",
|
|
body: []byte(`{"model": "gpt-4"}`),
|
|
clientProtocol: "openai",
|
|
nativePath: "/chat/completions",
|
|
expected: false,
|
|
},
|
|
{
|
|
name: "invalid json",
|
|
body: []byte(`{invalid}`),
|
|
clientProtocol: "openai",
|
|
nativePath: "/chat/completions",
|
|
expected: false,
|
|
},
|
|
{
|
|
name: "not chat endpoint",
|
|
body: []byte(`{"model": "gpt-4", "stream": true}`),
|
|
clientProtocol: "openai",
|
|
nativePath: "/models",
|
|
expected: false,
|
|
},
|
|
{
|
|
name: "anthropic stream",
|
|
body: []byte(`{"model": "claude-3", "stream": true}`),
|
|
clientProtocol: "anthropic",
|
|
nativePath: "/v1/messages",
|
|
expected: true,
|
|
},
|
|
}
|
|
for _, tt := range tests {
|
|
t.Run(tt.name, func(t *testing.T) {
|
|
result := h.isStreamRequest(tt.body, tt.clientProtocol, tt.nativePath)
|
|
assert.Equal(t, tt.expected, result)
|
|
})
|
|
}
|
|
}
|
|
|
|
// ============ Models / ModelInfo local aggregation tests ============
|
|
|
|
func TestProxyHandler_HandleProxy_Models_LocalAggregation(t *testing.T) {
|
|
ctrl := gomock.NewController(t)
|
|
defer ctrl.Finish()
|
|
|
|
engine := setupProxyEngine(t)
|
|
providerSvc := mocks.NewMockProviderService(ctrl)
|
|
providerSvc.EXPECT().ListEnabledModels().Return([]domain.Model{
|
|
{ID: "m1", ProviderID: "openai", ModelName: "gpt-4", Enabled: true},
|
|
{ID: "m2", ProviderID: "anthropic", ModelName: "claude-3", Enabled: true},
|
|
}, nil)
|
|
client := mocks.NewMockProviderClient(ctrl)
|
|
routingSvc := mocks.NewMockRoutingService(ctrl)
|
|
statsSvc := mocks.NewMockStatsService(ctrl)
|
|
h := newTestProxyHandler(engine, client, routingSvc, providerSvc, statsSvc)
|
|
|
|
w := httptest.NewRecorder()
|
|
c, _ := gin.CreateTestContext(w)
|
|
c.Params = gin.Params{{Key: "protocol", Value: "openai"}, {Key: "path", Value: "/models"}}
|
|
c.Request = httptest.NewRequest("GET", "/openai/models", nil)
|
|
|
|
h.HandleProxy(c)
|
|
assert.Equal(t, 200, w.Code)
|
|
|
|
var resp map[string]interface{}
|
|
require.NoError(t, json.Unmarshal(w.Body.Bytes(), &resp))
|
|
data, ok := resp["data"].([]interface{})
|
|
require.True(t, ok)
|
|
assert.Len(t, data, 2)
|
|
|
|
first := data[0].(map[string]interface{})
|
|
assert.Equal(t, "openai/gpt-4", first["id"])
|
|
}
|
|
|
|
func TestProxyHandler_HandleProxy_ModelInfo_LocalQuery(t *testing.T) {
|
|
ctrl := gomock.NewController(t)
|
|
defer ctrl.Finish()
|
|
|
|
engine := setupProxyEngine(t)
|
|
providerSvc := mocks.NewMockProviderService(ctrl)
|
|
providerSvc.EXPECT().GetModelByProviderAndName("openai", "gpt-4").Return(&domain.Model{ID: "m1", ProviderID: "openai", ModelName: "gpt-4", Enabled: true}, nil)
|
|
client := mocks.NewMockProviderClient(ctrl)
|
|
routingSvc := mocks.NewMockRoutingService(ctrl)
|
|
statsSvc := mocks.NewMockStatsService(ctrl)
|
|
h := newTestProxyHandler(engine, client, routingSvc, providerSvc, statsSvc)
|
|
|
|
w := httptest.NewRecorder()
|
|
c, _ := gin.CreateTestContext(w)
|
|
c.Params = gin.Params{{Key: "protocol", Value: "openai"}, {Key: "path", Value: "/models/openai/gpt-4"}}
|
|
c.Request = httptest.NewRequest("GET", "/openai/models/openai/gpt-4", nil)
|
|
|
|
h.HandleProxy(c)
|
|
assert.Equal(t, 200, w.Code)
|
|
|
|
var resp map[string]interface{}
|
|
require.NoError(t, json.Unmarshal(w.Body.Bytes(), &resp))
|
|
assert.Equal(t, "openai/gpt-4", resp["id"])
|
|
}
|
|
|
|
func TestProxyHandler_HandleProxy_Models_EmptySuffix_ForwardPassthrough(t *testing.T) {
|
|
ctrl := gomock.NewController(t)
|
|
defer ctrl.Finish()
|
|
|
|
engine := setupProxyEngine(t)
|
|
providerSvc := mocks.NewMockProviderService(ctrl)
|
|
providerSvc.EXPECT().List().Return([]domain.Provider{
|
|
{ID: "p1", Name: "Test", APIKey: "sk-test", BaseURL: "https://api.test.com", Protocol: "openai"},
|
|
}, nil)
|
|
client := mocks.NewMockProviderClient(ctrl)
|
|
client.EXPECT().Send(gomock.Any(), gomock.Any()).DoAndReturn(func(ctx context.Context, spec conversion.HTTPRequestSpec) (*conversion.HTTPResponseSpec, error) {
|
|
return &conversion.HTTPResponseSpec{
|
|
StatusCode: 200,
|
|
Body: []byte(`{"object":"list","data":[]}`),
|
|
}, nil
|
|
})
|
|
routingSvc := mocks.NewMockRoutingService(ctrl)
|
|
routingSvc.EXPECT().RouteByModelName(gomock.Any(), gomock.Any()).Return(nil, appErrors.ErrModelNotFound).AnyTimes()
|
|
statsSvc := mocks.NewMockStatsService(ctrl)
|
|
h := newTestProxyHandler(engine, client, routingSvc, providerSvc, statsSvc)
|
|
|
|
w := httptest.NewRecorder()
|
|
c, _ := gin.CreateTestContext(w)
|
|
c.Params = gin.Params{{Key: "protocol", Value: "openai"}, {Key: "path", Value: "/models/"}}
|
|
c.Request = httptest.NewRequest("GET", "/openai/models/", nil)
|
|
|
|
h.HandleProxy(c)
|
|
assert.Equal(t, 200, w.Code)
|
|
}
|
|
|
|
// ============ Smart Passthrough unified model ID routing tests ============
|
|
|
|
func TestProxyHandler_HandleProxy_SmartPassthrough_UnifiedID(t *testing.T) {
|
|
ctrl := gomock.NewController(t)
|
|
defer ctrl.Finish()
|
|
|
|
engine := setupProxyEngine(t)
|
|
routingSvc := mocks.NewMockRoutingService(ctrl)
|
|
routingSvc.EXPECT().RouteByModelName("openai_p", "gpt-4").Return(&domain.RouteResult{
|
|
Provider: &domain.Provider{ID: "openai_p", Name: "Test", APIKey: "sk-test", BaseURL: "https://api.test.com", Protocol: "openai", Enabled: true},
|
|
Model: &domain.Model{ID: "m1", ProviderID: "openai_p", ModelName: "gpt-4", Enabled: true},
|
|
}, nil)
|
|
client := mocks.NewMockProviderClient(ctrl)
|
|
client.EXPECT().Send(gomock.Any(), gomock.Any()).DoAndReturn(func(ctx context.Context, spec conversion.HTTPRequestSpec) (*conversion.HTTPResponseSpec, error) {
|
|
var req map[string]interface{}
|
|
json.Unmarshal(spec.Body, &req)
|
|
assert.Equal(t, "gpt-4", req["model"])
|
|
|
|
return &conversion.HTTPResponseSpec{
|
|
StatusCode: 200,
|
|
Headers: map[string]string{"Content-Type": "application/json"},
|
|
Body: []byte(`{"id":"resp-1","object":"chat.completion","model":"gpt-4","choices":[{"index":0,"message":{"role":"assistant","content":"Hello"},"finish_reason":"stop"}],"usage":{"prompt_tokens":5,"completion_tokens":3,"total_tokens":8}}`),
|
|
}, nil
|
|
})
|
|
providerSvc := mocks.NewMockProviderService(ctrl)
|
|
statsSvc := mocks.NewMockStatsService(ctrl)
|
|
statsSvc.EXPECT().Record(gomock.Any(), gomock.Any()).Return(nil).AnyTimes()
|
|
h := newTestProxyHandler(engine, client, routingSvc, providerSvc, statsSvc)
|
|
|
|
w := httptest.NewRecorder()
|
|
c, _ := gin.CreateTestContext(w)
|
|
c.Params = gin.Params{{Key: "protocol", Value: "openai"}, {Key: "path", Value: "/chat/completions"}}
|
|
c.Request = httptest.NewRequest("POST", "/openai/chat/completions", bytes.NewReader([]byte(`{"model":"openai_p/gpt-4","messages":[{"role":"user","content":"hi"}]}`)))
|
|
|
|
h.HandleProxy(c)
|
|
assert.Equal(t, 200, w.Code)
|
|
|
|
var resp map[string]interface{}
|
|
require.NoError(t, json.Unmarshal(w.Body.Bytes(), &resp))
|
|
assert.Equal(t, "openai_p/gpt-4", resp["model"])
|
|
}
|
|
|
|
// ============ Cross-protocol unified model ID routing tests ============
|
|
|
|
func TestProxyHandler_HandleProxy_CrossProtocol_NonStream_UnifiedID(t *testing.T) {
|
|
ctrl := gomock.NewController(t)
|
|
defer ctrl.Finish()
|
|
|
|
engine := setupProxyEngine(t)
|
|
routingSvc := mocks.NewMockRoutingService(ctrl)
|
|
routingSvc.EXPECT().RouteByModelName("anthropic_p", "claude-3").Return(&domain.RouteResult{
|
|
Provider: &domain.Provider{ID: "anthropic_p", Name: "Anthropic", APIKey: "sk-test", BaseURL: "https://api.anthropic.com", Protocol: "anthropic", Enabled: true},
|
|
Model: &domain.Model{ID: "m1", ProviderID: "anthropic_p", ModelName: "claude-3", Enabled: true},
|
|
}, nil)
|
|
client := mocks.NewMockProviderClient(ctrl)
|
|
client.EXPECT().Send(gomock.Any(), gomock.Any()).DoAndReturn(func(ctx context.Context, spec conversion.HTTPRequestSpec) (*conversion.HTTPResponseSpec, error) {
|
|
return &conversion.HTTPResponseSpec{
|
|
StatusCode: 200,
|
|
Headers: map[string]string{"Content-Type": "application/json"},
|
|
Body: []byte(`{"id":"msg-1","type":"message","role":"assistant","model":"claude-3","content":[{"type":"text","text":"Hello"}],"stop_reason":"end_turn","usage":{"input_tokens":5,"output_tokens":3}}`),
|
|
}, nil
|
|
})
|
|
providerSvc := mocks.NewMockProviderService(ctrl)
|
|
statsSvc := mocks.NewMockStatsService(ctrl)
|
|
statsSvc.EXPECT().Record(gomock.Any(), gomock.Any()).Return(nil).AnyTimes()
|
|
h := newTestProxyHandler(engine, client, routingSvc, providerSvc, statsSvc)
|
|
|
|
w := httptest.NewRecorder()
|
|
c, _ := gin.CreateTestContext(w)
|
|
c.Params = gin.Params{{Key: "protocol", Value: "openai"}, {Key: "path", Value: "/chat/completions"}}
|
|
c.Request = httptest.NewRequest("POST", "/openai/chat/completions", bytes.NewReader([]byte(`{"model":"anthropic_p/claude-3","messages":[{"role":"user","content":"hi"}]}`)))
|
|
|
|
h.HandleProxy(c)
|
|
assert.Equal(t, 200, w.Code)
|
|
|
|
var resp map[string]interface{}
|
|
require.NoError(t, json.Unmarshal(w.Body.Bytes(), &resp))
|
|
assert.Equal(t, "anthropic_p/claude-3", resp["model"])
|
|
}
|
|
|
|
// TestProxyHandler_HandleProxy_CrossProtocol_Stream_UnifiedID verifies a
// streaming OpenAI-protocol request routed to an Anthropic-protocol provider:
// the handler must use SendStream, respond as text/event-stream, and the
// converted stream must carry the unified "provider/model" ID.
func TestProxyHandler_HandleProxy_CrossProtocol_Stream_UnifiedID(t *testing.T) {
	ctrl := gomock.NewController(t)
	defer ctrl.Finish()

	engine := setupProxyEngine(t)
	// Unified ID "anthropic_p/claude-3" resolves to an Anthropic-protocol provider.
	routingSvc := mocks.NewMockRoutingService(ctrl)
	routingSvc.EXPECT().RouteByModelName("anthropic_p", "claude-3").Return(&domain.RouteResult{
		Provider: &domain.Provider{ID: "anthropic_p", Name: "Anthropic", APIKey: "sk-test", BaseURL: "https://api.anthropic.com", Protocol: "anthropic", Enabled: true},
		Model:    &domain.Model{ID: "m1", ProviderID: "anthropic_p", ModelName: "claude-3", Enabled: true},
	}, nil)
	client := mocks.NewMockProviderClient(ctrl)
	// Stub the upstream stream with a minimal Anthropic SSE sequence
	// (message_start -> content_block_delta -> message_stop). The raw string
	// literals below are exact SSE frames — including the trailing blank
	// line that terminates each event — so their bytes must not be altered.
	client.EXPECT().SendStream(gomock.Any(), gomock.Any()).DoAndReturn(func(ctx context.Context, spec conversion.HTTPRequestSpec) (<-chan provider.StreamEvent, error) {
		ch := make(chan provider.StreamEvent, 10)
		go func() {
			defer close(ch)
			ch <- provider.StreamEvent{Data: []byte(`event: message_start
data: {"type":"message_start","message":{"id":"msg-1","type":"message","role":"assistant","model":"claude-3","content":[]}}

`)}
			ch <- provider.StreamEvent{Data: []byte(`event: content_block_delta
data: {"type":"content_block_delta","index":0,"delta":{"type":"text_delta","text":"Hi"}}

`)}
			ch <- provider.StreamEvent{Data: []byte(`event: message_stop
data: {"type":"message_stop"}

`)}
			// Signal end-of-stream to the handler.
			ch <- provider.StreamEvent{Done: true}
		}()
		return ch, nil
	})
	providerSvc := mocks.NewMockProviderService(ctrl)
	statsSvc := mocks.NewMockStatsService(ctrl)
	statsSvc.EXPECT().Record(gomock.Any(), gomock.Any()).Return(nil).AnyTimes()
	h := newTestProxyHandler(engine, client, routingSvc, providerSvc, statsSvc)

	w := httptest.NewRecorder()
	c, _ := gin.CreateTestContext(w)
	c.Params = gin.Params{{Key: "protocol", Value: "openai"}, {Key: "path", Value: "/chat/completions"}}
	c.Request = httptest.NewRequest("POST", "/openai/chat/completions", bytes.NewReader([]byte(`{"model":"anthropic_p/claude-3","messages":[{"role":"user","content":"hi"}],"stream":true}`)))

	h.HandleProxy(c)
	assert.Equal(t, 200, w.Code)
	assert.Equal(t, "text/event-stream", w.Header().Get("Content-Type"))

	// The converted stream must mention the unified model ID somewhere in
	// the emitted SSE body.
	body := w.Body.String()
	assert.Contains(t, body, "anthropic_p/claude-3", "跨协议流式响应中 model 应被覆写为统一模型 ID")
}
|
|
|
|
func TestProxyHandler_HandleProxy_SmartPassthrough_Fidelity(t *testing.T) {
|
|
ctrl := gomock.NewController(t)
|
|
defer ctrl.Finish()
|
|
|
|
engine := setupProxyEngine(t)
|
|
routingSvc := mocks.NewMockRoutingService(ctrl)
|
|
routingSvc.EXPECT().RouteByModelName("openai_p", "gpt-4").Return(&domain.RouteResult{
|
|
Provider: &domain.Provider{ID: "openai_p", Name: "Test", APIKey: "sk-test", BaseURL: "https://api.test.com", Protocol: "openai", Enabled: true},
|
|
Model: &domain.Model{ID: "m1", ProviderID: "openai_p", ModelName: "gpt-4", Enabled: true},
|
|
}, nil)
|
|
var capturedRequestBody []byte
|
|
client := mocks.NewMockProviderClient(ctrl)
|
|
client.EXPECT().Send(gomock.Any(), gomock.Any()).DoAndReturn(func(ctx context.Context, spec conversion.HTTPRequestSpec) (*conversion.HTTPResponseSpec, error) {
|
|
capturedRequestBody = spec.Body
|
|
return &conversion.HTTPResponseSpec{
|
|
StatusCode: 200,
|
|
Headers: map[string]string{"Content-Type": "application/json"},
|
|
Body: []byte(`{"id":"resp-1","object":"chat.completion","model":"gpt-4","choices":[{"index":0,"message":{"role":"assistant","content":"Hello"},"finish_reason":"stop"}],"usage":{"prompt_tokens":5,"completion_tokens":3,"total_tokens":8},"unknown_field":"preserved"}`),
|
|
}, nil
|
|
})
|
|
providerSvc := mocks.NewMockProviderService(ctrl)
|
|
statsSvc := mocks.NewMockStatsService(ctrl)
|
|
statsSvc.EXPECT().Record(gomock.Any(), gomock.Any()).Return(nil).AnyTimes()
|
|
h := newTestProxyHandler(engine, client, routingSvc, providerSvc, statsSvc)
|
|
|
|
w := httptest.NewRecorder()
|
|
c, _ := gin.CreateTestContext(w)
|
|
c.Params = gin.Params{{Key: "protocol", Value: "openai"}, {Key: "path", Value: "/chat/completions"}}
|
|
c.Request = httptest.NewRequest("POST", "/openai/chat/completions", bytes.NewReader([]byte(`{"model":"openai_p/gpt-4","messages":[{"role":"user","content":"hi"}],"custom_param":"should_be_preserved"}`)))
|
|
|
|
h.HandleProxy(c)
|
|
assert.Equal(t, 200, w.Code)
|
|
|
|
var reqBody map[string]interface{}
|
|
require.NoError(t, json.Unmarshal(capturedRequestBody, &reqBody))
|
|
assert.Equal(t, "gpt-4", reqBody["model"], "请求中 model 应被改写为上游模型名")
|
|
assert.Equal(t, "should_be_preserved", reqBody["custom_param"], "Smart Passthrough 应保留未知参数")
|
|
|
|
var resp map[string]interface{}
|
|
require.NoError(t, json.Unmarshal(w.Body.Bytes(), &resp))
|
|
assert.Equal(t, "openai_p/gpt-4", resp["model"], "响应中 model 应被改写为统一模型 ID")
|
|
assert.Equal(t, "preserved", resp["unknown_field"], "Smart Passthrough 应保留未知响应字段")
|
|
}
|
|
|
|
func TestProxyHandler_HandleProxy_UnifiedID_ModelNotFound(t *testing.T) {
|
|
ctrl := gomock.NewController(t)
|
|
defer ctrl.Finish()
|
|
|
|
engine := setupProxyEngine(t)
|
|
routingSvc := mocks.NewMockRoutingService(ctrl)
|
|
routingSvc.EXPECT().RouteByModelName("unknown", "model").Return(nil, appErrors.ErrModelNotFound)
|
|
providerSvc := mocks.NewMockProviderService(ctrl)
|
|
client := mocks.NewMockProviderClient(ctrl)
|
|
statsSvc := mocks.NewMockStatsService(ctrl)
|
|
h := newTestProxyHandler(engine, client, routingSvc, providerSvc, statsSvc)
|
|
|
|
w := httptest.NewRecorder()
|
|
c, _ := gin.CreateTestContext(w)
|
|
c.Params = gin.Params{{Key: "protocol", Value: "openai"}, {Key: "path", Value: "/chat/completions"}}
|
|
c.Request = httptest.NewRequest("POST", "/openai/chat/completions", bytes.NewReader([]byte(`{"model":"unknown/model","messages":[{"role":"user","content":"hi"}]}`)))
|
|
|
|
h.HandleProxy(c)
|
|
assert.Equal(t, 404, w.Code)
|
|
|
|
var resp map[string]interface{}
|
|
require.NoError(t, json.Unmarshal(w.Body.Bytes(), &resp))
|
|
assert.Contains(t, resp, "error")
|
|
}
|