// Tests for ProxyHandler: routing, protocol conversion (stream and
// non-stream), passthrough forwarding, and the small request-inspection
// helpers (extractHeaders, extractModelName, isStreamRequest).
package handler

import (
	"bytes"
	"context"
	"encoding/json"
	"fmt"
	"net/http/httptest"
	"testing"
	"time"

	"github.com/gin-gonic/gin"
	"github.com/stretchr/testify/assert"
	"github.com/stretchr/testify/require"

	"nex/backend/internal/conversion"
	"nex/backend/internal/conversion/anthropic"
	"nex/backend/internal/conversion/openai"
	"nex/backend/internal/domain"
	"nex/backend/internal/provider"
	appErrors "nex/backend/pkg/errors"
)

func init() { gin.SetMode(gin.TestMode) }

// Common request bodies reused by many tests.
const (
	chatBody       = `{"model":"gpt-4","messages":[{"role":"user","content":"hi"}]}`
	chatBodyStream = `{"model":"gpt-4","messages":[{"role":"user","content":"hi"}],"stream":true}`
)

// mockProxyProviderClient stubs the upstream provider client. When the
// fn fields are nil, Send/SendStream fall back to canned successful
// OpenAI-shaped responses.
type mockProxyProviderClient struct {
	sendFn       func(ctx context.Context, spec conversion.HTTPRequestSpec) (*conversion.HTTPResponseSpec, error)
	sendStreamFn func(ctx context.Context, spec conversion.HTTPRequestSpec) (<-chan provider.StreamEvent, error)
}

func (m *mockProxyProviderClient) Send(ctx context.Context, spec conversion.HTTPRequestSpec) (*conversion.HTTPResponseSpec, error) {
	if m.sendFn != nil {
		return m.sendFn(ctx, spec)
	}
	return &conversion.HTTPResponseSpec{
		StatusCode: 200,
		Headers:    map[string]string{"Content-Type": "application/json"},
		Body:       []byte(`{"id":"resp-1","model":"gpt-4","choices":[{"index":0,"message":{"role":"assistant","content":"hi"},"finish_reason":"stop"}],"usage":{"prompt_tokens":1,"completion_tokens":1,"total_tokens":2}}`),
	}, nil
}

func (m *mockProxyProviderClient) SendStream(ctx context.Context, spec conversion.HTTPRequestSpec) (<-chan provider.StreamEvent, error) {
	if m.sendStreamFn != nil {
		return m.sendStreamFn(ctx, spec)
	}
	ch := make(chan provider.StreamEvent, 10)
	go func() {
		defer close(ch)
		ch <- provider.StreamEvent{Data: []byte("data: {\"id\":\"1\",\"choices\":[{\"delta\":{\"content\":\"hi\"}}]}\n\n")}
		ch <- provider.StreamEvent{Data: []byte("data: [DONE]\n\n")}
		ch <- provider.StreamEvent{Done: true}
	}()
	return ch, nil
}

// mockProxyRoutingService returns a fixed route result (or error)
// regardless of the requested model name.
type mockProxyRoutingService struct {
	result *domain.RouteResult
	err    error
}

func (m *mockProxyRoutingService) Route(modelName string) (*domain.RouteResult, error) {
	return m.result, m.err
}

// mockProxyProviderService only implements List meaningfully; the
// remaining CRUD methods are no-ops required by the interface.
type mockProxyProviderService struct {
	providers []domain.Provider
	err       error
}

func (m *mockProxyProviderService) Create(p *domain.Provider) error { return nil }
func (m *mockProxyProviderService) Get(id string, maskKey bool) (*domain.Provider, error) {
	return nil, nil
}
func (m *mockProxyProviderService) List() ([]domain.Provider, error) { return m.providers, m.err }
func (m *mockProxyProviderService) Update(id string, updates map[string]any) error {
	return nil
}
func (m *mockProxyProviderService) Delete(id string) error { return nil }

// mockProxyStatsService is a no-op stats sink.
type mockProxyStatsService struct{}

func (m *mockProxyStatsService) Record(providerID, modelName string) error { return nil }
func (m *mockProxyStatsService) Get(providerID, modelName string, startDate, endDate *time.Time) ([]domain.UsageStats, error) {
	return nil, nil
}
func (m *mockProxyStatsService) Aggregate(stats []domain.UsageStats, groupBy string) []map[string]any {
	return nil
}

// setupProxyEngine builds a conversion engine with both the OpenAI and
// Anthropic adapters registered.
func setupProxyEngine(t *testing.T) *conversion.ConversionEngine {
	t.Helper()
	registry := conversion.NewMemoryRegistry()
	engine := conversion.NewConversionEngine(registry, nil)
	require.NoError(t, registry.Register(openai.NewAdapter()))
	require.NoError(t, registry.Register(anthropic.NewAdapter()))
	return engine
}

func newTestProxyHandler(engine *conversion.ConversionEngine, client *mockProxyProviderClient, routingSvc *mockProxyRoutingService, providerSvc *mockProxyProviderService) *ProxyHandler {
	return NewProxyHandler(engine, client, routingSvc, providerSvc, &mockProxyStatsService{})
}

// routeTo returns the standard test route fixture ("p1"/"m1") pointing
// at a provider speaking the given protocol and serving modelName.
func routeTo(protocol, modelName string) *domain.RouteResult {
	return &domain.RouteResult{
		Provider: &domain.Provider{ID: "p1", Name: "Test", APIKey: "sk-test", BaseURL: "https://api.test.com", Protocol: protocol, Enabled: true},
		Model:    &domain.Model{ID: "m1", ProviderID: "p1", ModelName: modelName, Enabled: true},
	}
}

// newChatContext builds a gin context for POST /openai/v1/chat/completions
// with the given JSON body and the protocol/path params set.
func newChatContext(w *httptest.ResponseRecorder, body string) *gin.Context {
	c, _ := gin.CreateTestContext(w)
	c.Params = gin.Params{{Key: "protocol", Value: "openai"}, {Key: "path", Value: "/chat/completions"}}
	c.Request = httptest.NewRequest("POST", "/openai/v1/chat/completions", bytes.NewReader([]byte(body)))
	return c
}

// newModelsContext builds a gin context for GET /openai/v1/models with
// no request body.
func newModelsContext(w *httptest.ResponseRecorder) *gin.Context {
	c, _ := gin.CreateTestContext(w)
	c.Params = gin.Params{{Key: "protocol", Value: "openai"}, {Key: "path", Value: "/models"}}
	c.Request = httptest.NewRequest("GET", "/openai/v1/models", nil)
	return c
}

func TestProxyHandler_HandleProxy_MissingProtocol(t *testing.T) {
	engine := setupProxyEngine(t)
	h := newTestProxyHandler(engine, &mockProxyProviderClient{}, &mockProxyRoutingService{}, &mockProxyProviderService{})
	w := httptest.NewRecorder()
	c, _ := gin.CreateTestContext(w)
	// No protocol param at all -> bad request.
	c.Request = httptest.NewRequest("POST", "/", bytes.NewReader([]byte(`{}`)))
	h.HandleProxy(c)
	assert.Equal(t, 400, w.Code)
}

func TestProxyHandler_HandleProxy_NonStreamSuccess(t *testing.T) {
	engine := setupProxyEngine(t)
	routingSvc := &mockProxyRoutingService{result: routeTo("openai", "gpt-4")}
	client := &mockProxyProviderClient{
		sendFn: func(ctx context.Context, spec conversion.HTTPRequestSpec) (*conversion.HTTPResponseSpec, error) {
			return &conversion.HTTPResponseSpec{
				StatusCode: 200,
				Headers:    map[string]string{"Content-Type": "application/json"},
				Body:       []byte(`{"id":"resp-1","object":"chat.completion","model":"gpt-4","choices":[{"index":0,"message":{"role":"assistant","content":"Hello"},"finish_reason":"stop"}],"usage":{"prompt_tokens":5,"completion_tokens":3,"total_tokens":8}}`),
			}, nil
		},
	}
	h := newTestProxyHandler(engine, client, routingSvc, &mockProxyProviderService{})
	w := httptest.NewRecorder()
	c := newChatContext(w, chatBody)
	h.HandleProxy(c)
	assert.Equal(t, 200, w.Code)
	var resp map[string]any
	require.NoError(t, json.Unmarshal(w.Body.Bytes(), &resp))
	assert.Equal(t, "resp-1", resp["id"])
}

func TestProxyHandler_HandleProxy_RoutingError_WithBody(t *testing.T) {
	engine := setupProxyEngine(t)
	routingSvc := &mockProxyRoutingService{err: appErrors.ErrModelNotFound}
	h := newTestProxyHandler(engine, &mockProxyProviderClient{}, routingSvc, &mockProxyProviderService{})
	w := httptest.NewRecorder()
	c := newChatContext(w, `{"model":"unknown","messages":[{"role":"user","content":"hi"}]}`)
	h.HandleProxy(c)
	assert.Equal(t, 404, w.Code)
}

// NOTE(review): this test is currently identical to
// TestProxyHandler_HandleProxy_ClientSendError and does not exercise a
// conversion failure; consider reworking it to trigger an actual
// request-conversion error.
func TestProxyHandler_HandleProxy_ConversionError(t *testing.T) {
	engine := setupProxyEngine(t)
	routingSvc := &mockProxyRoutingService{result: routeTo("openai", "gpt-4")}
	client := &mockProxyProviderClient{
		sendFn: func(ctx context.Context, spec conversion.HTTPRequestSpec) (*conversion.HTTPResponseSpec, error) {
			return nil, context.DeadlineExceeded
		},
	}
	h := newTestProxyHandler(engine, client, routingSvc, &mockProxyProviderService{})
	w := httptest.NewRecorder()
	c := newChatContext(w, chatBody)
	h.HandleProxy(c)
	assert.Equal(t, 500, w.Code)
}

func TestProxyHandler_HandleProxy_ClientSendError(t *testing.T) {
	engine := setupProxyEngine(t)
	routingSvc := &mockProxyRoutingService{result: routeTo("openai", "gpt-4")}
	client := &mockProxyProviderClient{
		sendFn: func(ctx context.Context, spec conversion.HTTPRequestSpec) (*conversion.HTTPResponseSpec, error) {
			return nil, context.DeadlineExceeded
		},
	}
	h := newTestProxyHandler(engine, client, routingSvc, &mockProxyProviderService{})
	w := httptest.NewRecorder()
	c := newChatContext(w, chatBody)
	h.HandleProxy(c)
	assert.Equal(t, 500, w.Code)
}

func TestProxyHandler_HandleProxy_StreamSuccess(t *testing.T) {
	engine := setupProxyEngine(t)
	routingSvc := &mockProxyRoutingService{result: routeTo("openai", "gpt-4")}
	client := &mockProxyProviderClient{
		sendStreamFn: func(ctx context.Context, spec conversion.HTTPRequestSpec) (<-chan provider.StreamEvent, error) {
			ch := make(chan provider.StreamEvent, 10)
			go func() {
				defer close(ch)
				ch <- provider.StreamEvent{Data: []byte("data: {\"id\":\"1\",\"object\":\"chat.completion.chunk\",\"choices\":[{\"index\":0,\"delta\":{\"role\":\"assistant\"}}]}\n\n")}
				ch <- provider.StreamEvent{Data: []byte("data: {\"id\":\"1\",\"object\":\"chat.completion.chunk\",\"choices\":[{\"index\":0,\"delta\":{\"content\":\"Hello\"}}]}\n\n")}
				ch <- provider.StreamEvent{Data: []byte("data: [DONE]\n\n")}
				ch <- provider.StreamEvent{Done: true}
			}()
			return ch, nil
		},
	}
	h := newTestProxyHandler(engine, client, routingSvc, &mockProxyProviderService{})
	w := httptest.NewRecorder()
	c := newChatContext(w, chatBodyStream)
	h.HandleProxy(c)
	assert.Equal(t, 200, w.Code)
	assert.Equal(t, "text/event-stream", w.Header().Get("Content-Type"))
	assert.Contains(t, w.Body.String(), "Hello")
}

func TestProxyHandler_HandleProxy_StreamError(t *testing.T) {
	engine := setupProxyEngine(t)
	routingSvc := &mockProxyRoutingService{result: routeTo("openai", "gpt-4")}
	client := &mockProxyProviderClient{
		sendStreamFn: func(ctx context.Context, spec conversion.HTTPRequestSpec) (<-chan provider.StreamEvent, error) {
			return nil, context.DeadlineExceeded
		},
	}
	h := newTestProxyHandler(engine, client, routingSvc, &mockProxyProviderService{})
	w := httptest.NewRecorder()
	c := newChatContext(w, chatBodyStream)
	h.HandleProxy(c)
	assert.Equal(t, 500, w.Code)
}

func TestProxyHandler_ForwardPassthrough_GET(t *testing.T) {
	engine := setupProxyEngine(t)
	// Routing fails -> handler falls back to passthrough using List().
	routingSvc := &mockProxyRoutingService{err: appErrors.ErrModelNotFound}
	providerSvc := &mockProxyProviderService{
		providers: []domain.Provider{
			{ID: "p1", Name: "Test", APIKey: "sk-test", BaseURL: "https://api.test.com", Protocol: "openai"},
		},
	}
	client := &mockProxyProviderClient{
		sendFn: func(ctx context.Context, spec conversion.HTTPRequestSpec) (*conversion.HTTPResponseSpec, error) {
			return &conversion.HTTPResponseSpec{
				StatusCode: 200,
				Headers:    map[string]string{"Content-Type": "application/json"},
				Body:       []byte(`{"object":"list","data":[{"id":"gpt-4","object":"model"}]}`),
			}, nil
		},
	}
	h := newTestProxyHandler(engine, client, routingSvc, providerSvc)
	w := httptest.NewRecorder()
	c := newModelsContext(w)
	h.HandleProxy(c)
	assert.Equal(t, 200, w.Code)
}

func TestProxyHandler_ForwardPassthrough_UnsupportedProtocol(t *testing.T) {
	engine := setupProxyEngine(t)
	routingSvc := &mockProxyRoutingService{err: appErrors.ErrModelNotFound}
	providerSvc := &mockProxyProviderService{}
	h := newTestProxyHandler(engine, &mockProxyProviderClient{}, routingSvc, providerSvc)
	w := httptest.NewRecorder()
	c, _ := gin.CreateTestContext(w)
	c.Params = gin.Params{{Key: "protocol", Value: "unknown"}, {Key: "path", Value: "/models"}}
	c.Request = httptest.NewRequest("GET", "/unknown/v1/models", nil)
	h.HandleProxy(c)
	assert.Equal(t, 400, w.Code)
}

func TestProxyHandler_ForwardPassthrough_NoProviders(t *testing.T) {
	engine := setupProxyEngine(t)
	routingSvc := &mockProxyRoutingService{err: appErrors.ErrModelNotFound}
	providerSvc := &mockProxyProviderService{providers: []domain.Provider{}}
	h := newTestProxyHandler(engine, &mockProxyProviderClient{}, routingSvc, providerSvc)
	w := httptest.NewRecorder()
	c := newModelsContext(w)
	h.HandleProxy(c)
	assert.Equal(t, 404, w.Code)
}

func TestExtractHeaders(t *testing.T) {
	w := httptest.NewRecorder()
	c, _ := gin.CreateTestContext(w)
	c.Request = httptest.NewRequest("POST", "/", nil)
	c.Request.Header.Set("Authorization", "Bearer test")
	c.Request.Header.Set("Content-Type", "application/json")
	headers := extractHeaders(c)
	assert.Equal(t, "Bearer test", headers["Authorization"])
	assert.Equal(t, "application/json", headers["Content-Type"])
}

func TestProxyHandler_HandleProxy_ProviderProtocolDefault(t *testing.T) {
	engine := setupProxyEngine(t)
	// Empty provider protocol: handler should fall back to a default.
	routingSvc := &mockProxyRoutingService{result: routeTo("", "gpt-4")}
	client := &mockProxyProviderClient{
		sendFn: func(ctx context.Context, spec conversion.HTTPRequestSpec) (*conversion.HTTPResponseSpec, error) {
			return &conversion.HTTPResponseSpec{
				StatusCode: 200,
				Body:       []byte(`{"id":"r1","object":"chat.completion","model":"gpt-4","choices":[{"index":0,"message":{"role":"assistant","content":"ok"},"finish_reason":"stop"}],"usage":{"prompt_tokens":1,"completion_tokens":1,"total_tokens":2}}`),
			}, nil
		},
	}
	h := newTestProxyHandler(engine, client, routingSvc, &mockProxyProviderService{})
	w := httptest.NewRecorder()
	c := newChatContext(w, chatBody)
	h.HandleProxy(c)
	assert.Equal(t, 200, w.Code)
}

func TestProxyHandler_WriteConversionError_NonConversionError(t *testing.T) {
	engine := setupProxyEngine(t)
	h := newTestProxyHandler(engine, &mockProxyProviderClient{}, &mockProxyRoutingService{}, &mockProxyProviderService{})
	w := httptest.NewRecorder()
	c, _ := gin.CreateTestContext(w)
	c.Request = httptest.NewRequest("POST", "/", nil)
	h.writeConversionError(c, context.DeadlineExceeded, "openai")
	assert.Equal(t, 500, w.Code)
}

func TestProxyHandler_WriteConversionError_ConversionError(t *testing.T) {
	engine := setupProxyEngine(t)
	h := newTestProxyHandler(engine, &mockProxyProviderClient{}, &mockProxyRoutingService{}, &mockProxyProviderService{})
	w := httptest.NewRecorder()
	c, _ := gin.CreateTestContext(w)
	c.Request = httptest.NewRequest("POST", "/", nil)
	convErr := conversion.NewConversionError(conversion.ErrorCodeInvalidInput, "bad request")
	h.writeConversionError(c, convErr, "openai")
	assert.Equal(t, 500, w.Code)
}

func TestProxyHandler_HandleProxy_EmptyBody(t *testing.T) {
	engine := setupProxyEngine(t)
	routingSvc := &mockProxyRoutingService{err: appErrors.ErrModelNotFound}
	providerSvc := &mockProxyProviderService{
		providers: []domain.Provider{
			{ID: "p1", Name: "Test", APIKey: "sk-test", BaseURL: "https://api.test.com", Protocol: "openai"},
		},
	}
	client := &mockProxyProviderClient{
		sendFn: func(ctx context.Context, spec conversion.HTTPRequestSpec) (*conversion.HTTPResponseSpec, error) {
			return &conversion.HTTPResponseSpec{
				StatusCode: 200,
				Body:       []byte(`{"object":"list","data":[]}`),
			}, nil
		},
	}
	h := newTestProxyHandler(engine, client, routingSvc, providerSvc)
	w := httptest.NewRecorder()
	c := newModelsContext(w)
	h.HandleProxy(c)
	assert.Equal(t, 200, w.Code)
}

func TestProxyHandler_HandleStream_MidStreamError(t *testing.T) {
	engine := setupProxyEngine(t)
	routingSvc := &mockProxyRoutingService{result: routeTo("openai", "gpt-4")}
	client := &mockProxyProviderClient{
		sendStreamFn: func(ctx context.Context, spec conversion.HTTPRequestSpec) (<-chan provider.StreamEvent, error) {
			ch := make(chan provider.StreamEvent, 10)
			go func() {
				defer close(ch)
				ch <- provider.StreamEvent{Data: []byte("data: {\"id\":\"1\",\"object\":\"chat.completion.chunk\",\"choices\":[{\"index\":0,\"delta\":{\"content\":\"Hello\"}}]}\n\n")}
				// Error after the first chunk: headers are already
				// written, so the handler can only stop streaming.
				ch <- provider.StreamEvent{Error: fmt.Errorf("connection reset by peer")}
			}()
			return ch, nil
		},
	}
	h := newTestProxyHandler(engine, client, routingSvc, &mockProxyProviderService{})
	w := httptest.NewRecorder()
	c := newChatContext(w, chatBodyStream)
	h.HandleProxy(c)
	assert.Equal(t, 200, w.Code)
	assert.Equal(t, "text/event-stream", w.Header().Get("Content-Type"))
	body := w.Body.String()
	assert.Contains(t, body, "Hello")
}

func TestProxyHandler_HandleStream_FlushOutput(t *testing.T) {
	engine := setupProxyEngine(t)
	routingSvc := &mockProxyRoutingService{result: routeTo("openai", "gpt-4")}
	client := &mockProxyProviderClient{
		sendStreamFn: func(ctx context.Context, spec conversion.HTTPRequestSpec) (<-chan provider.StreamEvent, error) {
			ch := make(chan provider.StreamEvent, 10)
			go func() {
				defer close(ch)
				ch <- provider.StreamEvent{Data: []byte("data: {\"id\":\"1\",\"object\":\"chat.completion.chunk\",\"choices\":[{\"index\":0,\"delta\":{\"content\":\"Hi\"}}]}\n\n")}
				ch <- provider.StreamEvent{Data: []byte("data: [DONE]\n\n")}
				ch <- provider.StreamEvent{Done: true}
			}()
			return ch, nil
		},
	}
	h := newTestProxyHandler(engine, client, routingSvc, &mockProxyProviderService{})
	w := httptest.NewRecorder()
	c := newChatContext(w, chatBodyStream)
	h.HandleProxy(c)
	assert.Equal(t, 200, w.Code)
	assert.Equal(t, "text/event-stream", w.Header().Get("Content-Type"))
	assert.Equal(t, "no-cache", w.Header().Get("Cache-Control"))
	assert.Equal(t, "keep-alive", w.Header().Get("Connection"))
	body := w.Body.String()
	assert.Contains(t, body, "Hi")
	assert.Contains(t, body, "[DONE]")
}

// NOTE(review): the next two tests share identical setups (only the
// openai adapter registered, provider protocol "nonexistent"); if the
// handler has distinct converter-creation vs request-conversion error
// paths, they may be covering the same one.
func TestProxyHandler_HandleStream_CreateStreamConverterError(t *testing.T) {
	registry := conversion.NewMemoryRegistry()
	engine := conversion.NewConversionEngine(registry, nil)
	// Deliberately register only the openai adapter so the provider's
	// "nonexistent" protocol has no converter.
	err := registry.Register(openai.NewAdapter())
	require.NoError(t, err)
	routingSvc := &mockProxyRoutingService{result: routeTo("nonexistent", "gpt-4")}
	h := newTestProxyHandler(engine, &mockProxyProviderClient{}, routingSvc, &mockProxyProviderService{})
	w := httptest.NewRecorder()
	c := newChatContext(w, chatBodyStream)
	h.HandleProxy(c)
	assert.Equal(t, 500, w.Code)
}

func TestProxyHandler_HandleStream_ConvertRequestError(t *testing.T) {
	registry := conversion.NewMemoryRegistry()
	engine := conversion.NewConversionEngine(registry, nil)
	require.NoError(t, registry.Register(openai.NewAdapter()))
	routingSvc := &mockProxyRoutingService{result: routeTo("nonexistent", "gpt-4")}
	h := newTestProxyHandler(engine, &mockProxyProviderClient{}, routingSvc, &mockProxyProviderService{})
	w := httptest.NewRecorder()
	c := newChatContext(w, chatBodyStream)
	h.HandleProxy(c)
	assert.Equal(t, 500, w.Code)
}

func TestProxyHandler_HandleNonStream_ConvertResponseError(t *testing.T) {
	engine := setupProxyEngine(t)
	routingSvc := &mockProxyRoutingService{result: routeTo("anthropic", "claude-3")}
	client := &mockProxyProviderClient{
		sendFn: func(ctx context.Context, spec conversion.HTTPRequestSpec) (*conversion.HTTPResponseSpec, error) {
			// Malformed upstream body forces a response-conversion failure.
			return &conversion.HTTPResponseSpec{
				StatusCode: 200,
				Headers:    map[string]string{"Content-Type": "application/json"},
				Body:       []byte(`invalid json`),
			}, nil
		},
	}
	h := newTestProxyHandler(engine, client, routingSvc, &mockProxyProviderService{})
	w := httptest.NewRecorder()
	c := newChatContext(w, `{"model":"claude-3","messages":[{"role":"user","content":"hi"}]}`)
	h.HandleProxy(c)
	assert.Equal(t, 500, w.Code)
}

func TestProxyHandler_HandleNonStream_ResponseHeaders(t *testing.T) {
	engine := setupProxyEngine(t)
	routingSvc := &mockProxyRoutingService{result: routeTo("openai", "gpt-4")}
	client := &mockProxyProviderClient{
		sendFn: func(ctx context.Context, spec conversion.HTTPRequestSpec) (*conversion.HTTPResponseSpec, error) {
			return &conversion.HTTPResponseSpec{
				StatusCode: 200,
				Headers:    map[string]string{"Content-Type": "application/json", "X-Custom": "test-value"},
				Body:       []byte(`{"id":"r1","object":"chat.completion","model":"gpt-4","choices":[{"index":0,"message":{"role":"assistant","content":"ok"},"finish_reason":"stop"}],"usage":{"prompt_tokens":1,"completion_tokens":1,"total_tokens":2}}`),
			}, nil
		},
	}
	h := newTestProxyHandler(engine, client, routingSvc, &mockProxyProviderService{})
	w := httptest.NewRecorder()
	c := newChatContext(w, chatBody)
	h.HandleProxy(c)
	assert.Equal(t, 200, w.Code)
	assert.Equal(t, "test-value", w.Header().Get("X-Custom"))
	assert.Equal(t, "application/json", w.Header().Get("Content-Type"))
}

func TestProxyHandler_ForwardPassthrough_CrossProtocol(t *testing.T) {
	engine := setupProxyEngine(t)
	// Client speaks openai; the only available provider is anthropic.
	routingSvc := &mockProxyRoutingService{err: appErrors.ErrModelNotFound}
	providerSvc := &mockProxyProviderService{
		providers: []domain.Provider{
			{ID: "p1", Name: "Test", APIKey: "sk-test", BaseURL: "https://api.test.com", Protocol: "anthropic"},
		},
	}
	client := &mockProxyProviderClient{
		sendFn: func(ctx context.Context, spec conversion.HTTPRequestSpec) (*conversion.HTTPResponseSpec, error) {
			return &conversion.HTTPResponseSpec{
				StatusCode: 200,
				Headers:    map[string]string{"Content-Type": "application/json"},
				Body:       []byte(`{"object":"list","data":[]}`),
			}, nil
		},
	}
	h := newTestProxyHandler(engine, client, routingSvc, providerSvc)
	w := httptest.NewRecorder()
	c := newModelsContext(w)
	h.HandleProxy(c)
	assert.Equal(t, 200, w.Code)
}

func TestProxyHandler_ForwardPassthrough_NoBody_NoModel(t *testing.T) {
	engine := setupProxyEngine(t)
	routingSvc := &mockProxyRoutingService{err: appErrors.ErrModelNotFound}
	providerSvc := &mockProxyProviderService{
		providers: []domain.Provider{
			{ID: "p1", Name: "Test", APIKey: "sk-test", BaseURL: "https://api.test.com", Protocol: "openai"},
		},
	}
	client := &mockProxyProviderClient{
		sendFn: func(ctx context.Context, spec conversion.HTTPRequestSpec) (*conversion.HTTPResponseSpec, error) {
			return &conversion.HTTPResponseSpec{
				StatusCode: 200,
				Headers:    map[string]string{"Content-Type": "application/json"},
				Body:       []byte(`{"object":"list","data":[{"id":"gpt-4","object":"model"}]}`),
			}, nil
		},
	}
	h := newTestProxyHandler(engine, client, routingSvc, providerSvc)
	w := httptest.NewRecorder()
	c := newModelsContext(w)
	h.HandleProxy(c)
	assert.Equal(t, 200, w.Code)
}

func TestIsStreamRequest_EdgeCases(t *testing.T) {
	engine := setupProxyEngine(t)
	h := newTestProxyHandler(engine, &mockProxyProviderClient{}, &mockProxyRoutingService{}, &mockProxyProviderService{})
	tests := []struct {
		name     string
		body     string
		path     string
		expected bool
	}{
		{"stream at end of JSON", `{"messages":[],"stream":true}`, "/v1/chat/completions", true},
		{"stream with spaces", `{"stream" : true}`, "/v1/chat/completions", true},
		{"stream embedded in string value", `{"model":"stream:true"}`, "/v1/chat/completions", false},
		{"empty body", "", "/v1/chat/completions", false},
		{"stream true embeddings", `{"model":"text-emb","stream":true}`, "/v1/embeddings", false},
	}
	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			result := h.isStreamRequest([]byte(tt.body), "openai", tt.path)
			assert.Equal(t, tt.expected, result)
		})
	}
}

func TestProxyHandler_WriteError_RouteError(t *testing.T) {
	engine := setupProxyEngine(t)
	h := newTestProxyHandler(engine, &mockProxyProviderClient{}, &mockProxyRoutingService{}, &mockProxyProviderService{})
	w := httptest.NewRecorder()
	c, _ := gin.CreateTestContext(w)
	c.Request = httptest.NewRequest("POST", "/", nil)
	h.writeError(c, fmt.Errorf("model not found"), "openai")
	assert.Equal(t, 404, w.Code)
}

func TestProxyHandler_HandleProxy_RouteEmptyBody_NoModel(t *testing.T) {
	engine := setupProxyEngine(t)
	routingSvc := &mockProxyRoutingService{err: appErrors.ErrModelNotFound}
	providerSvc := &mockProxyProviderService{
		providers: []domain.Provider{
			{ID: "p1", Name: "Test", APIKey: "sk-test", BaseURL: "https://api.test.com", Protocol: "openai"},
		},
	}
	client := &mockProxyProviderClient{
		sendFn: func(ctx context.Context, spec conversion.HTTPRequestSpec) (*conversion.HTTPResponseSpec, error) {
			return &conversion.HTTPResponseSpec{
				StatusCode: 200,
				Body:       []byte(`{"object":"list","data":[]}`),
			}, nil
		},
	}
	h := newTestProxyHandler(engine, client, routingSvc, providerSvc)
	w := httptest.NewRecorder()
	c := newModelsContext(w)
	h.HandleProxy(c)
	assert.Equal(t, 200, w.Code)
}

// ============ extractModelName tests ============

func TestExtractModelName(t *testing.T) {
	tests := []struct {
		name     string
		body     []byte
		expected string
	}{
		{
			name:     "valid model",
			body:     []byte(`{"model": "gpt-4", "messages": []}`),
			expected: "gpt-4",
		},
		{
			name:     "empty body",
			body:     []byte(`{}`),
			expected: "",
		},
		{
			name:     "invalid json",
			body:     []byte(`{invalid}`),
			expected: "",
		},
		{
			name:     "nested structure",
			body:     []byte(`{"model": "claude-3", "messages": [{"role": "user", "content": "hello"}]}`),
			expected: "claude-3",
		},
		{
			name:     "model with special chars",
			body:     []byte(`{"model": "gpt-4-0125-preview", "stream": true}`),
			expected: "gpt-4-0125-preview",
		},
		{
			name:     "empty body bytes",
			body:     []byte{},
			expected: "",
		},
		{
			name:     "model is null",
			body:     []byte(`{"model": null}`),
			expected: "",
		},
	}
	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			result := extractModelName(tt.body)
			assert.Equal(t, tt.expected, result)
		})
	}
}

// ============ isStreamRequest tests ============

func TestIsStreamRequest(t *testing.T) {
	engine := setupProxyEngine(t)
	h := &ProxyHandler{engine: engine}
	tests := []struct {
		name           string
		body           []byte
		clientProtocol string
		nativePath     string
		expected       bool
	}{
		{
			name:           "stream true",
			body:           []byte(`{"model": "gpt-4", "stream": true}`),
			clientProtocol: "openai",
			nativePath:     "/v1/chat/completions",
			expected:       true,
		},
		{
			name:           "stream false",
			body:           []byte(`{"model": "gpt-4", "stream": false}`),
			clientProtocol: "openai",
			nativePath:     "/v1/chat/completions",
			expected:       false,
		},
		{
			name:           "no stream field",
			body:           []byte(`{"model": "gpt-4"}`),
			clientProtocol: "openai",
			nativePath:     "/v1/chat/completions",
			expected:       false,
		},
		{
			name:           "invalid json",
			body:           []byte(`{invalid}`),
			clientProtocol: "openai",
			nativePath:     "/v1/chat/completions",
			expected:       false,
		},
		{
			name:           "not chat endpoint",
			body:           []byte(`{"model": "gpt-4", "stream": true}`),
			clientProtocol: "openai",
			nativePath:     "/v1/models",
			expected:       false,
		},
		{
			name:           "anthropic stream",
			body:           []byte(`{"model": "claude-3", "stream": true}`),
			clientProtocol: "anthropic",
			nativePath:     "/v1/messages",
			expected:       true,
		},
	}
	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			result := h.isStreamRequest(tt.body, tt.clientProtocol, tt.nativePath)
			assert.Equal(t, tt.expected, result)
		})
	}
}