1
0

fix: 完善转换代理行为

This commit is contained in:
2026-04-26 21:48:17 +08:00
parent 155244433f
commit 9622d44aac
33 changed files with 1127 additions and 1117 deletions

View File

@@ -288,19 +288,33 @@ func TestDetectInterfaceType_NonExistentProtocol(t *testing.T) {
func TestConvertHttpRequest_Passthrough(t *testing.T) {
registry := NewMemoryRegistry()
engine := NewConversionEngine(registry, zap.NewNop())
_ = engine.RegisterAdapter(newMockAdapter("openai", true))
openaiAdapter := &buildURLMockAdapter{
mockProtocolAdapter: newMockAdapter("openai", true),
buildURLFn: func(nativePath string, interfaceType InterfaceType) string {
if interfaceType == InterfaceTypeChat {
return "/chat/completions"
}
return nativePath
},
}
openaiAdapter.ifaceType = InterfaceTypeChat
openaiAdapter.supportsIface[InterfaceTypeChat] = true
openaiAdapter.rewriteReqFn = func(body []byte, newModel string, ifaceType InterfaceType) ([]byte, error) {
return []byte(`{"model":"` + newModel + `","messages":[{"role":"user","content":"hi"}]}`), nil
}
_ = engine.RegisterAdapter(openaiAdapter)
provider := NewTargetProvider("https://api.openai.com/v1", "sk-test", "gpt-4")
spec := HTTPRequestSpec{
URL: "/chat/completions",
URL: "/v1/chat/completions",
Method: "POST",
Body: []byte(`{"model":"gpt-4","messages":[{"role":"user","content":"hi"}]}`),
Body: []byte(`{"model":"openai/gpt-4","messages":[{"role":"user","content":"hi"}]}`),
}
result, err := engine.ConvertHttpRequest(spec, "openai", "openai", provider)
require.NoError(t, err)
assert.Equal(t, "https://api.openai.com/v1/chat/completions", result.URL)
assert.Equal(t, spec.Body, result.Body)
assert.JSONEq(t, `{"model":"gpt-4","messages":[{"role":"user","content":"hi"}]}`, string(result.Body))
}
func TestConvertHttpRequest_CrossProtocol(t *testing.T) {
@@ -335,6 +349,77 @@ func TestConvertHttpRequest_CrossProtocol(t *testing.T) {
assert.NotNil(t, result.Body)
}
func TestConvertHttpRequest_UsesProviderAdapterBuildURL(t *testing.T) {
registry := NewMemoryRegistry()
engine := NewConversionEngine(registry, zap.NewNop())
openaiAdapter := &buildURLMockAdapter{
mockProtocolAdapter: newMockAdapter("openai", true),
buildURLFn: func(nativePath string, interfaceType InterfaceType) string {
if interfaceType == InterfaceTypeChat {
return "/chat/completions"
}
return nativePath
},
}
openaiAdapter.ifaceType = InterfaceTypeChat
openaiAdapter.supportsIface[InterfaceTypeChat] = true
openaiAdapter.rewriteReqFn = func(body []byte, newModel string, ifaceType InterfaceType) ([]byte, error) {
return []byte(`{"model":"` + newModel + `"}`), nil
}
require.NoError(t, registry.Register(openaiAdapter))
anthropicAdapter := &buildURLMockAdapter{
mockProtocolAdapter: newMockAdapter("anthropic", false),
buildURLFn: func(nativePath string, interfaceType InterfaceType) string {
if interfaceType == InterfaceTypeChat {
return "/v1/messages"
}
return nativePath
},
}
anthropicAdapter.ifaceType = InterfaceTypeChat
anthropicAdapter.supportsIface[InterfaceTypeChat] = true
require.NoError(t, registry.Register(anthropicAdapter))
t.Run("OpenAI to Anthropic", func(t *testing.T) {
provider := NewTargetProvider("https://api.anthropic.com", "key", "claude-3")
spec := HTTPRequestSpec{
URL: "/v1/chat/completions",
Method: "POST",
Body: []byte(`{"model":"p1/gpt-4","messages":[{"role":"user","content":"hi"}],"max_tokens":16}`),
}
result, err := engine.ConvertHttpRequest(spec, "openai", "anthropic", provider)
require.NoError(t, err)
assert.Equal(t, "https://api.anthropic.com/v1/messages", result.URL)
})
t.Run("Anthropic to OpenAI", func(t *testing.T) {
provider := NewTargetProvider("https://api.openai.com/v1", "key", "gpt-4")
spec := HTTPRequestSpec{
URL: "/v1/messages",
Method: "POST",
Body: []byte(`{"model":"p1/claude-3","max_tokens":16,"messages":[{"role":"user","content":"hi"}]}`),
}
result, err := engine.ConvertHttpRequest(spec, "anthropic", "openai", provider)
require.NoError(t, err)
assert.Equal(t, "https://api.openai.com/v1/chat/completions", result.URL)
})
}
// buildURLMockAdapter embeds mockProtocolAdapter and lets a test override
// BuildUrl via an injectable function while inheriting all other mock behavior.
type buildURLMockAdapter struct {
	*mockProtocolAdapter
	// buildURLFn, when non-nil, replaces the embedded adapter's BuildUrl.
	buildURLFn func(string, InterfaceType) string
}
// BuildUrl delegates to the injected buildURLFn when one is set and otherwise
// falls back to the embedded mockProtocolAdapter's implementation.
func (m *buildURLMockAdapter) BuildUrl(nativePath string, interfaceType InterfaceType) string {
	fn := m.buildURLFn
	if fn == nil {
		return m.mockProtocolAdapter.BuildUrl(nativePath, interfaceType)
	}
	return fn(nativePath, interfaceType)
}
func TestConvertHttpResponse_Passthrough(t *testing.T) {
registry := NewMemoryRegistry()
engine := NewConversionEngine(registry, zap.NewNop())

View File

@@ -29,27 +29,27 @@ func (a *Adapter) SupportsPassthrough() bool { return true }
// DetectInterfaceType 根据路径检测接口类型
func (a *Adapter) DetectInterfaceType(nativePath string) conversion.InterfaceType {
switch {
case nativePath == "/chat/completions":
case nativePath == "/v1/chat/completions":
return conversion.InterfaceTypeChat
case nativePath == "/models":
case nativePath == "/v1/models":
return conversion.InterfaceTypeModels
case isModelInfoPath(nativePath):
return conversion.InterfaceTypeModelInfo
case nativePath == "/embeddings":
case nativePath == "/v1/embeddings":
return conversion.InterfaceTypeEmbeddings
case nativePath == "/rerank":
case nativePath == "/v1/rerank":
return conversion.InterfaceTypeRerank
default:
return conversion.InterfaceTypePassthrough
}
}
// isModelInfoPath 判断是否为模型详情路径(/models/{id},允许 id 含 /
// isModelInfoPath 判断是否为模型详情路径(/v1/models/{id},允许 id 含 /
func isModelInfoPath(path string) bool {
if !strings.HasPrefix(path, "/models/") {
if !strings.HasPrefix(path, "/v1/models/") {
return false
}
suffix := path[len("/models/"):]
suffix := path[len("/v1/models/"):]
return suffix != ""
}
@@ -60,6 +60,11 @@ func (a *Adapter) BuildUrl(nativePath string, interfaceType conversion.Interface
return "/chat/completions"
case conversion.InterfaceTypeModels:
return "/models"
case conversion.InterfaceTypeModelInfo:
if modelID, err := a.ExtractUnifiedModelID(nativePath); err == nil {
return "/models/" + modelID
}
return nativePath
case conversion.InterfaceTypeEmbeddings:
return "/embeddings"
case conversion.InterfaceTypeRerank:
@@ -221,12 +226,12 @@ func (a *Adapter) EncodeRerankResponse(resp *canonical.CanonicalRerankResponse)
return encodeRerankResponse(resp)
}
// ExtractUnifiedModelID 从路径中提取统一模型 ID/models/{provider_id}/{model_name}
// ExtractUnifiedModelID 从路径中提取统一模型 ID/v1/models/{provider_id}/{model_name}
func (a *Adapter) ExtractUnifiedModelID(nativePath string) (string, error) {
if !strings.HasPrefix(nativePath, "/models/") {
if !strings.HasPrefix(nativePath, "/v1/models/") {
return "", fmt.Errorf("不是模型详情路径: %s", nativePath)
}
suffix := nativePath[len("/models/"):]
suffix := nativePath[len("/v1/models/"):]
if suffix == "" {
return "", fmt.Errorf("路径缺少模型 ID")
}

View File

@@ -28,11 +28,11 @@ func TestAdapter_DetectInterfaceType(t *testing.T) {
path string
expected conversion.InterfaceType
}{
{"聊天补全", "/chat/completions", conversion.InterfaceTypeChat},
{"模型列表", "/models", conversion.InterfaceTypeModels},
{"模型详情", "/models/gpt-4", conversion.InterfaceTypeModelInfo},
{"嵌入接口", "/embeddings", conversion.InterfaceTypeEmbeddings},
{"重排序接口", "/rerank", conversion.InterfaceTypeRerank},
{"聊天补全", "/v1/chat/completions", conversion.InterfaceTypeChat},
{"模型列表", "/v1/models", conversion.InterfaceTypeModels},
{"模型详情", "/v1/models/openai/gpt-4", conversion.InterfaceTypeModelInfo},
{"嵌入接口", "/v1/embeddings", conversion.InterfaceTypeEmbeddings},
{"重排序接口", "/v1/rerank", conversion.InterfaceTypeRerank},
{"未知路径", "/unknown", conversion.InterfaceTypePassthrough},
}
@@ -44,20 +44,18 @@ func TestAdapter_DetectInterfaceType(t *testing.T) {
}
}
func TestAdapter_APIReferenceNativePaths(t *testing.T) {
func TestAdapter_OldPathsBecomePassthrough(t *testing.T) {
a := NewAdapter()
// docs/api_reference/openai, excluding responses, defines paths without /v1.
tests := []struct {
path string
expected conversion.InterfaceType
}{
{"/chat/completions", conversion.InterfaceTypeChat},
{"/models", conversion.InterfaceTypeModels},
{"/models/gpt-4.1", conversion.InterfaceTypeModelInfo},
{"/embeddings", conversion.InterfaceTypeEmbeddings},
{"/rerank", conversion.InterfaceTypeRerank},
{"/v1/chat/completions", conversion.InterfaceTypePassthrough},
{"/chat/completions", conversion.InterfaceTypePassthrough},
{"/models", conversion.InterfaceTypePassthrough},
{"/models/gpt-4.1", conversion.InterfaceTypePassthrough},
{"/embeddings", conversion.InterfaceTypePassthrough},
{"/rerank", conversion.InterfaceTypePassthrough},
}
for _, tt := range tests {
@@ -76,10 +74,12 @@ func TestAdapter_BuildUrl(t *testing.T) {
interfaceType conversion.InterfaceType
expected string
}{
{"聊天", "/chat/completions", conversion.InterfaceTypeChat, "/chat/completions"},
{"模型", "/models", conversion.InterfaceTypeModels, "/models"},
{"嵌入", "/embeddings", conversion.InterfaceTypeEmbeddings, "/embeddings"},
{"重排序", "/rerank", conversion.InterfaceTypeRerank, "/rerank"},
{"聊天", "/v1/chat/completions", conversion.InterfaceTypeChat, "/chat/completions"},
{"模型", "/v1/models", conversion.InterfaceTypeModels, "/models"},
{"模型详情", "/v1/models/openai/gpt-4", conversion.InterfaceTypeModelInfo, "/models/openai/gpt-4"},
{"复杂模型详情", "/v1/models/azure/accounts/org/models/gpt-4", conversion.InterfaceTypeModelInfo, "/models/azure/accounts/org/models/gpt-4"},
{"嵌入", "/v1/embeddings", conversion.InterfaceTypeEmbeddings, "/embeddings"},
{"重排序", "/v1/rerank", conversion.InterfaceTypeRerank, "/rerank"},
{"默认透传", "/other", conversion.InterfaceTypePassthrough, "/other"},
}
@@ -141,12 +141,12 @@ func TestIsModelInfoPath(t *testing.T) {
path string
expected bool
}{
{"model_info", "/models/gpt-4", true},
{"model_info_with_dots", "/models/gpt-4.1-preview", true},
{"models_list", "/models", false},
{"nested_path", "/models/gpt-4/versions", true},
{"empty_suffix", "/models/", false},
{"unrelated", "/chat/completions", false},
{"model_info", "/v1/models/openai/gpt-4", true},
{"model_info_with_dots", "/v1/models/openai/gpt-4.1-preview", true},
{"models_list", "/v1/models", false},
{"nested_path", "/v1/models/azure/accounts/org-123/models/gpt-4", true},
{"empty_suffix", "/v1/models/", false},
{"unrelated", "/v1/chat/completions", false},
{"partial_prefix", "/model", false},
}
@@ -157,6 +157,27 @@ func TestIsModelInfoPath(t *testing.T) {
}
}
// TestAdapter_ExtractUnifiedModelID checks that unified model IDs are extracted
// from /v1/models/{id} paths (including ids containing slashes) and that
// non-model-detail paths produce an error.
func TestAdapter_ExtractUnifiedModelID(t *testing.T) {
	a := NewAdapter()

	cases := []struct {
		name    string
		path    string
		want    string
		wantErr bool
	}{
		{name: "标准路径", path: "/v1/models/openai/gpt-4", want: "openai/gpt-4"},
		{name: "复杂路径", path: "/v1/models/azure/accounts/org/models/gpt-4", want: "azure/accounts/org/models/gpt-4"},
		{name: "非模型详情路径报错", path: "/v1/models", wantErr: true},
	}

	for _, tc := range cases {
		t.Run(tc.name, func(t *testing.T) {
			got, err := a.ExtractUnifiedModelID(tc.path)
			if tc.wantErr {
				require.Error(t, err)
				return
			}
			require.NoError(t, err)
			assert.Equal(t, tc.want, got)
		})
	}
}
func TestAdapter_EncodeError_InvalidInput(t *testing.T) {
a := NewAdapter()
convErr := conversion.NewConversionError(conversion.ErrorCodeInvalidInput, "参数无效")

View File

@@ -18,35 +18,35 @@ func TestExtractUnifiedModelID(t *testing.T) {
a := NewAdapter()
t.Run("standard_path", func(t *testing.T) {
id, err := a.ExtractUnifiedModelID("/models/openai/gpt-4")
id, err := a.ExtractUnifiedModelID("/v1/models/openai/gpt-4")
require.NoError(t, err)
assert.Equal(t, "openai/gpt-4", id)
})
t.Run("multi_segment_path", func(t *testing.T) {
id, err := a.ExtractUnifiedModelID("/models/azure/accounts/org/models/gpt-4")
id, err := a.ExtractUnifiedModelID("/v1/models/azure/accounts/org/models/gpt-4")
require.NoError(t, err)
assert.Equal(t, "azure/accounts/org/models/gpt-4", id)
})
t.Run("single_segment", func(t *testing.T) {
id, err := a.ExtractUnifiedModelID("/models/gpt-4")
id, err := a.ExtractUnifiedModelID("/v1/models/gpt-4")
require.NoError(t, err)
assert.Equal(t, "gpt-4", id)
})
t.Run("non_model_path", func(t *testing.T) {
_, err := a.ExtractUnifiedModelID("/chat/completions")
_, err := a.ExtractUnifiedModelID("/v1/chat/completions")
require.Error(t, err)
})
t.Run("empty_suffix", func(t *testing.T) {
_, err := a.ExtractUnifiedModelID("/models/")
_, err := a.ExtractUnifiedModelID("/v1/models/")
require.Error(t, err)
})
t.Run("models_list_no_slash", func(t *testing.T) {
_, err := a.ExtractUnifiedModelID("/models")
_, err := a.ExtractUnifiedModelID("/v1/models")
require.Error(t, err)
})
@@ -344,12 +344,12 @@ func TestIsModelInfoPath_UnifiedModelID(t *testing.T) {
path string
expected bool
}{
{"simple_model_id", "/models/gpt-4", true},
{"unified_model_id_with_slash", "/models/openai/gpt-4", true},
{"models_list", "/models", false},
{"models_list_trailing_slash", "/models/", false},
{"chat_completions", "/chat/completions", false},
{"deeply_nested", "/models/azure/eastus/deployments/my-dept/models/gpt-4", true},
{"simple_model_id", "/v1/models/gpt-4", true},
{"unified_model_id_with_slash", "/v1/models/openai/gpt-4", true},
{"models_list", "/v1/models", false},
{"models_list_trailing_slash", "/v1/models/", false},
{"chat_completions", "/v1/chat/completions", false},
{"deeply_nested", "/v1/models/azure/eastus/deployments/my-dept/models/gpt-4", true},
}
for _, tt := range tests {

View File

@@ -93,8 +93,8 @@ func TestProxyHandler_HandleProxy_NonStreamSuccess(t *testing.T) {
w := httptest.NewRecorder()
c, _ := gin.CreateTestContext(w)
c.Params = gin.Params{{Key: "protocol", Value: "openai"}, {Key: "path", Value: "/chat/completions"}}
c.Request = httptest.NewRequest("POST", "/openai/chat/completions", bytes.NewReader([]byte(`{"model":"p1/gpt-4","messages":[{"role":"user","content":"hi"}]}`)))
c.Params = gin.Params{{Key: "protocol", Value: "openai"}, {Key: "path", Value: "/v1/chat/completions"}}
c.Request = httptest.NewRequest("POST", "/openai/v1/chat/completions", bytes.NewReader([]byte(`{"model":"p1/gpt-4","messages":[{"role":"user","content":"hi"}]}`)))
h.HandleProxy(c)
assert.Equal(t, 200, w.Code)
@@ -118,8 +118,8 @@ func TestProxyHandler_HandleProxy_RoutingError_WithBody(t *testing.T) {
w := httptest.NewRecorder()
c, _ := gin.CreateTestContext(w)
c.Params = gin.Params{{Key: "protocol", Value: "openai"}, {Key: "path", Value: "/chat/completions"}}
c.Request = httptest.NewRequest("POST", "/openai/chat/completions", bytes.NewReader([]byte(`{"model":"unknown/model","messages":[{"role":"user","content":"hi"}]}`)))
c.Params = gin.Params{{Key: "protocol", Value: "openai"}, {Key: "path", Value: "/v1/chat/completions"}}
c.Request = httptest.NewRequest("POST", "/openai/v1/chat/completions", bytes.NewReader([]byte(`{"model":"unknown/model","messages":[{"role":"user","content":"hi"}]}`)))
h.HandleProxy(c)
assert.Equal(t, 404, w.Code)
@@ -146,8 +146,8 @@ func TestProxyHandler_HandleProxy_ConversionError(t *testing.T) {
w := httptest.NewRecorder()
c, _ := gin.CreateTestContext(w)
c.Params = gin.Params{{Key: "protocol", Value: "openai"}, {Key: "path", Value: "/chat/completions"}}
c.Request = httptest.NewRequest("POST", "/openai/chat/completions", bytes.NewReader([]byte(`{"model":"p1/gpt-4","messages":[{"role":"user","content":"hi"}]}`)))
c.Params = gin.Params{{Key: "protocol", Value: "openai"}, {Key: "path", Value: "/v1/chat/completions"}}
c.Request = httptest.NewRequest("POST", "/openai/v1/chat/completions", bytes.NewReader([]byte(`{"model":"p1/gpt-4","messages":[{"role":"user","content":"hi"}]}`)))
h.HandleProxy(c)
assert.Equal(t, 502, w.Code)
@@ -174,8 +174,8 @@ func TestProxyHandler_HandleProxy_ClientSendError(t *testing.T) {
w := httptest.NewRecorder()
c, _ := gin.CreateTestContext(w)
c.Params = gin.Params{{Key: "protocol", Value: "openai"}, {Key: "path", Value: "/chat/completions"}}
c.Request = httptest.NewRequest("POST", "/openai/chat/completions", bytes.NewReader([]byte(`{"model":"p1/gpt-4","messages":[{"role":"user","content":"hi"}]}`)))
c.Params = gin.Params{{Key: "protocol", Value: "openai"}, {Key: "path", Value: "/v1/chat/completions"}}
c.Request = httptest.NewRequest("POST", "/openai/v1/chat/completions", bytes.NewReader([]byte(`{"model":"p1/gpt-4","messages":[{"role":"user","content":"hi"}]}`)))
h.HandleProxy(c)
assert.Equal(t, 502, w.Code)
@@ -211,8 +211,8 @@ func TestProxyHandler_HandleProxy_StreamSuccess(t *testing.T) {
w := httptest.NewRecorder()
c, _ := gin.CreateTestContext(w)
c.Params = gin.Params{{Key: "protocol", Value: "openai"}, {Key: "path", Value: "/chat/completions"}}
c.Request = httptest.NewRequest("POST", "/openai/chat/completions", bytes.NewReader([]byte(`{"model":"p1/gpt-4","messages":[{"role":"user","content":"hi"}],"stream":true}`)))
c.Params = gin.Params{{Key: "protocol", Value: "openai"}, {Key: "path", Value: "/v1/chat/completions"}}
c.Request = httptest.NewRequest("POST", "/openai/v1/chat/completions", bytes.NewReader([]byte(`{"model":"p1/gpt-4","messages":[{"role":"user","content":"hi"}],"stream":true}`)))
h.HandleProxy(c)
assert.Equal(t, 200, w.Code)
@@ -241,8 +241,8 @@ func TestProxyHandler_HandleProxy_StreamError(t *testing.T) {
w := httptest.NewRecorder()
c, _ := gin.CreateTestContext(w)
c.Params = gin.Params{{Key: "protocol", Value: "openai"}, {Key: "path", Value: "/chat/completions"}}
c.Request = httptest.NewRequest("POST", "/openai/chat/completions", bytes.NewReader([]byte(`{"model":"p1/gpt-4","messages":[{"role":"user","content":"hi"}],"stream":true}`)))
c.Params = gin.Params{{Key: "protocol", Value: "openai"}, {Key: "path", Value: "/v1/chat/completions"}}
c.Request = httptest.NewRequest("POST", "/openai/v1/chat/completions", bytes.NewReader([]byte(`{"model":"p1/gpt-4","messages":[{"role":"user","content":"hi"}],"stream":true}`)))
h.HandleProxy(c)
assert.Equal(t, 502, w.Code)
@@ -266,8 +266,8 @@ func TestProxyHandler_ForwardPassthrough_GET(t *testing.T) {
w := httptest.NewRecorder()
c, _ := gin.CreateTestContext(w)
c.Params = gin.Params{{Key: "protocol", Value: "openai"}, {Key: "path", Value: "/models"}}
c.Request = httptest.NewRequest("GET", "/openai/models", nil)
c.Params = gin.Params{{Key: "protocol", Value: "openai"}, {Key: "path", Value: "/v1/models"}}
c.Request = httptest.NewRequest("GET", "/openai/v1/models", nil)
h.HandleProxy(c)
assert.Equal(t, 200, w.Code)
@@ -287,7 +287,7 @@ func TestProxyHandler_ForwardPassthrough_UnsupportedProtocol(t *testing.T) {
w := httptest.NewRecorder()
c, _ := gin.CreateTestContext(w)
c.Params = gin.Params{{Key: "protocol", Value: "unknown"}, {Key: "path", Value: "/models"}}
c.Params = gin.Params{{Key: "protocol", Value: "unknown"}, {Key: "path", Value: "/v1/models"}}
c.Request = httptest.NewRequest("GET", "/unknown/v1/models", nil)
h.HandleProxy(c)
@@ -309,8 +309,8 @@ func TestProxyHandler_ForwardPassthrough_NoProviders(t *testing.T) {
w := httptest.NewRecorder()
c, _ := gin.CreateTestContext(w)
c.Params = gin.Params{{Key: "protocol", Value: "openai"}, {Key: "path", Value: "/models"}}
c.Request = httptest.NewRequest("GET", "/openai/models", nil)
c.Params = gin.Params{{Key: "protocol", Value: "openai"}, {Key: "path", Value: "/v1/models"}}
c.Request = httptest.NewRequest("GET", "/openai/v1/models", nil)
h.HandleProxy(c)
assert.Equal(t, 200, w.Code)
@@ -352,8 +352,8 @@ func TestProxyHandler_HandleProxy_ProviderProtocolDefault(t *testing.T) {
w := httptest.NewRecorder()
c, _ := gin.CreateTestContext(w)
c.Params = gin.Params{{Key: "protocol", Value: "openai"}, {Key: "path", Value: "/chat/completions"}}
c.Request = httptest.NewRequest("POST", "/openai/chat/completions", bytes.NewReader([]byte(`{"model":"p1/gpt-4","messages":[{"role":"user","content":"hi"}]}`)))
c.Params = gin.Params{{Key: "protocol", Value: "openai"}, {Key: "path", Value: "/v1/chat/completions"}}
c.Request = httptest.NewRequest("POST", "/openai/v1/chat/completions", bytes.NewReader([]byte(`{"model":"p1/gpt-4","messages":[{"role":"user","content":"hi"}]}`)))
h.HandleProxy(c)
assert.Equal(t, 200, w.Code)
@@ -449,8 +449,8 @@ func TestProxyHandler_HandleProxy_EmptyBody(t *testing.T) {
w := httptest.NewRecorder()
c, _ := gin.CreateTestContext(w)
c.Params = gin.Params{{Key: "protocol", Value: "openai"}, {Key: "path", Value: "/models"}}
c.Request = httptest.NewRequest("GET", "/openai/models", nil)
c.Params = gin.Params{{Key: "protocol", Value: "openai"}, {Key: "path", Value: "/v1/models"}}
c.Request = httptest.NewRequest("GET", "/openai/v1/models", nil)
h.HandleProxy(c)
assert.Equal(t, 200, w.Code)
@@ -483,8 +483,8 @@ func TestProxyHandler_HandleStream_MidStreamError(t *testing.T) {
w := httptest.NewRecorder()
c, _ := gin.CreateTestContext(w)
c.Params = gin.Params{{Key: "protocol", Value: "openai"}, {Key: "path", Value: "/chat/completions"}}
c.Request = httptest.NewRequest("POST", "/openai/chat/completions", bytes.NewReader([]byte(`{"model":"p1/gpt-4","messages":[{"role":"user","content":"hi"}],"stream":true}`)))
c.Params = gin.Params{{Key: "protocol", Value: "openai"}, {Key: "path", Value: "/v1/chat/completions"}}
c.Request = httptest.NewRequest("POST", "/openai/v1/chat/completions", bytes.NewReader([]byte(`{"model":"p1/gpt-4","messages":[{"role":"user","content":"hi"}],"stream":true}`)))
h.HandleProxy(c)
assert.Equal(t, 200, w.Code)
@@ -521,8 +521,8 @@ func TestProxyHandler_HandleStream_FlushOutput(t *testing.T) {
w := httptest.NewRecorder()
c, _ := gin.CreateTestContext(w)
c.Params = gin.Params{{Key: "protocol", Value: "openai"}, {Key: "path", Value: "/chat/completions"}}
c.Request = httptest.NewRequest("POST", "/openai/chat/completions", bytes.NewReader([]byte(`{"model":"p1/gpt-4","messages":[{"role":"user","content":"hi"}],"stream":true}`)))
c.Params = gin.Params{{Key: "protocol", Value: "openai"}, {Key: "path", Value: "/v1/chat/completions"}}
c.Request = httptest.NewRequest("POST", "/openai/v1/chat/completions", bytes.NewReader([]byte(`{"model":"p1/gpt-4","messages":[{"role":"user","content":"hi"}],"stream":true}`)))
h.HandleProxy(c)
assert.Equal(t, 200, w.Code)
@@ -555,8 +555,8 @@ func TestProxyHandler_HandleStream_CreateStreamConverterError(t *testing.T) {
w := httptest.NewRecorder()
c, _ := gin.CreateTestContext(w)
c.Params = gin.Params{{Key: "protocol", Value: "openai"}, {Key: "path", Value: "/chat/completions"}}
c.Request = httptest.NewRequest("POST", "/openai/chat/completions", bytes.NewReader([]byte(`{"model":"p1/gpt-4","messages":[{"role":"user","content":"hi"}],"stream":true}`)))
c.Params = gin.Params{{Key: "protocol", Value: "openai"}, {Key: "path", Value: "/v1/chat/completions"}}
c.Request = httptest.NewRequest("POST", "/openai/v1/chat/completions", bytes.NewReader([]byte(`{"model":"p1/gpt-4","messages":[{"role":"user","content":"hi"}],"stream":true}`)))
h.HandleProxy(c)
assert.Equal(t, 500, w.Code)
@@ -582,8 +582,8 @@ func TestProxyHandler_HandleStream_ConvertRequestError(t *testing.T) {
w := httptest.NewRecorder()
c, _ := gin.CreateTestContext(w)
c.Params = gin.Params{{Key: "protocol", Value: "openai"}, {Key: "path", Value: "/chat/completions"}}
c.Request = httptest.NewRequest("POST", "/openai/chat/completions", bytes.NewReader([]byte(`{"model":"p1/gpt-4","messages":[{"role":"user","content":"hi"}],"stream":true}`)))
c.Params = gin.Params{{Key: "protocol", Value: "openai"}, {Key: "path", Value: "/v1/chat/completions"}}
c.Request = httptest.NewRequest("POST", "/openai/v1/chat/completions", bytes.NewReader([]byte(`{"model":"p1/gpt-4","messages":[{"role":"user","content":"hi"}],"stream":true}`)))
h.HandleProxy(c)
assert.Equal(t, 500, w.Code)
@@ -617,8 +617,8 @@ func TestProxyHandler_HandleNonStream_ConvertResponseError(t *testing.T) {
w := httptest.NewRecorder()
c, _ := gin.CreateTestContext(w)
c.Params = gin.Params{{Key: "protocol", Value: "openai"}, {Key: "path", Value: "/chat/completions"}}
c.Request = httptest.NewRequest("POST", "/openai/chat/completions", bytes.NewReader([]byte(`{"model":"p1/gpt-4","messages":[{"role":"user","content":"hi"}]}`)))
c.Params = gin.Params{{Key: "protocol", Value: "openai"}, {Key: "path", Value: "/v1/chat/completions"}}
c.Request = httptest.NewRequest("POST", "/openai/v1/chat/completions", bytes.NewReader([]byte(`{"model":"p1/gpt-4","messages":[{"role":"user","content":"hi"}]}`)))
h.HandleProxy(c)
assert.Equal(t, 500, w.Code)
@@ -649,8 +649,8 @@ func TestProxyHandler_HandleNonStream_ResponseHeaders(t *testing.T) {
w := httptest.NewRecorder()
c, _ := gin.CreateTestContext(w)
c.Params = gin.Params{{Key: "protocol", Value: "openai"}, {Key: "path", Value: "/chat/completions"}}
c.Request = httptest.NewRequest("POST", "/openai/chat/completions", bytes.NewReader([]byte(`{"model":"p1/gpt-4","messages":[{"role":"user","content":"hi"}]}`)))
c.Params = gin.Params{{Key: "protocol", Value: "openai"}, {Key: "path", Value: "/v1/chat/completions"}}
c.Request = httptest.NewRequest("POST", "/openai/v1/chat/completions", bytes.NewReader([]byte(`{"model":"p1/gpt-4","messages":[{"role":"user","content":"hi"}]}`)))
h.HandleProxy(c)
assert.Equal(t, 200, w.Code)
@@ -681,8 +681,8 @@ func TestProxyHandler_ForwardPassthrough_CrossProtocol(t *testing.T) {
w := httptest.NewRecorder()
c, _ := gin.CreateTestContext(w)
c.Params = gin.Params{{Key: "protocol", Value: "openai"}, {Key: "path", Value: "/models"}}
c.Request = httptest.NewRequest("GET", "/openai/models", nil)
c.Params = gin.Params{{Key: "protocol", Value: "openai"}, {Key: "path", Value: "/v1/models"}}
c.Request = httptest.NewRequest("GET", "/openai/v1/models", nil)
h.HandleProxy(c)
assert.Equal(t, 200, w.Code)
@@ -705,8 +705,8 @@ func TestProxyHandler_ForwardPassthrough_NoBody_NoModel(t *testing.T) {
w := httptest.NewRecorder()
c, _ := gin.CreateTestContext(w)
c.Params = gin.Params{{Key: "protocol", Value: "openai"}, {Key: "path", Value: "/models"}}
c.Request = httptest.NewRequest("GET", "/openai/models", nil)
c.Params = gin.Params{{Key: "protocol", Value: "openai"}, {Key: "path", Value: "/v1/models"}}
c.Request = httptest.NewRequest("GET", "/openai/v1/models", nil)
h.HandleProxy(c)
assert.Equal(t, 200, w.Code)
@@ -729,10 +729,10 @@ func TestIsStreamRequest_EdgeCases(t *testing.T) {
path string
expected bool
}{
{"stream at end of JSON", `{"messages":[],"stream":true}`, "/chat/completions", true},
{"stream with spaces", `{"stream" : true}`, "/chat/completions", true},
{"stream embedded in string value", `{"model":"stream:true"}`, "/chat/completions", false},
{"empty body", "", "/chat/completions", false},
{"stream at end of JSON", `{"messages":[],"stream":true}`, "/v1/chat/completions", true},
{"stream with spaces", `{"stream" : true}`, "/v1/chat/completions", true},
{"stream embedded in string value", `{"model":"stream:true"}`, "/v1/chat/completions", false},
{"empty body", "", "/v1/chat/completions", false},
{"stream true embeddings", `{"model":"text-emb","stream":true}`, "/v1/embeddings", false},
}
@@ -781,8 +781,8 @@ func TestProxyHandler_HandleProxy_RouteEmptyBody_NoModel(t *testing.T) {
w := httptest.NewRecorder()
c, _ := gin.CreateTestContext(w)
c.Params = gin.Params{{Key: "protocol", Value: "openai"}, {Key: "path", Value: "/models"}}
c.Request = httptest.NewRequest("GET", "/openai/models", nil)
c.Params = gin.Params{{Key: "protocol", Value: "openai"}, {Key: "path", Value: "/v1/models"}}
c.Request = httptest.NewRequest("GET", "/openai/v1/models", nil)
h.HandleProxy(c)
assert.Equal(t, 200, w.Code)
@@ -805,35 +805,35 @@ func TestIsStreamRequest(t *testing.T) {
name: "stream true",
body: []byte(`{"model": "gpt-4", "stream": true}`),
clientProtocol: "openai",
nativePath: "/chat/completions",
nativePath: "/v1/chat/completions",
expected: true,
},
{
name: "stream false",
body: []byte(`{"model": "gpt-4", "stream": false}`),
clientProtocol: "openai",
nativePath: "/chat/completions",
nativePath: "/v1/chat/completions",
expected: false,
},
{
name: "no stream field",
body: []byte(`{"model": "gpt-4"}`),
clientProtocol: "openai",
nativePath: "/chat/completions",
nativePath: "/v1/chat/completions",
expected: false,
},
{
name: "invalid json",
body: []byte(`{invalid}`),
clientProtocol: "openai",
nativePath: "/chat/completions",
nativePath: "/v1/chat/completions",
expected: false,
},
{
name: "not chat endpoint",
body: []byte(`{"model": "gpt-4", "stream": true}`),
clientProtocol: "openai",
nativePath: "/models",
nativePath: "/v1/models",
expected: false,
},
{
@@ -871,8 +871,8 @@ func TestProxyHandler_HandleProxy_Models_LocalAggregation(t *testing.T) {
w := httptest.NewRecorder()
c, _ := gin.CreateTestContext(w)
c.Params = gin.Params{{Key: "protocol", Value: "openai"}, {Key: "path", Value: "/models"}}
c.Request = httptest.NewRequest("GET", "/openai/models", nil)
c.Params = gin.Params{{Key: "protocol", Value: "openai"}, {Key: "path", Value: "/v1/models"}}
c.Request = httptest.NewRequest("GET", "/openai/v1/models", nil)
h.HandleProxy(c)
assert.Equal(t, 200, w.Code)
@@ -902,8 +902,8 @@ func TestProxyHandler_HandleProxy_ModelInfo_LocalQuery(t *testing.T) {
w := httptest.NewRecorder()
c, _ := gin.CreateTestContext(w)
c.Params = gin.Params{{Key: "protocol", Value: "openai"}, {Key: "path", Value: "/models/openai/gpt-4"}}
c.Request = httptest.NewRequest("GET", "/openai/models/openai/gpt-4", nil)
c.Params = gin.Params{{Key: "protocol", Value: "openai"}, {Key: "path", Value: "/v1/models/openai/gpt-4"}}
c.Request = httptest.NewRequest("GET", "/openai/v1/models/openai/gpt-4", nil)
h.HandleProxy(c)
assert.Equal(t, 200, w.Code)
@@ -936,8 +936,8 @@ func TestProxyHandler_HandleProxy_Models_EmptySuffix_ForwardPassthrough(t *testi
w := httptest.NewRecorder()
c, _ := gin.CreateTestContext(w)
c.Params = gin.Params{{Key: "protocol", Value: "openai"}, {Key: "path", Value: "/models/"}}
c.Request = httptest.NewRequest("GET", "/openai/models/", nil)
c.Params = gin.Params{{Key: "protocol", Value: "openai"}, {Key: "path", Value: "/v1/models/"}}
c.Request = httptest.NewRequest("GET", "/openai/v1/models/", nil)
h.HandleProxy(c)
assert.Equal(t, 200, w.Code)
@@ -974,8 +974,8 @@ func TestProxyHandler_HandleProxy_SmartPassthrough_UnifiedID(t *testing.T) {
w := httptest.NewRecorder()
c, _ := gin.CreateTestContext(w)
c.Params = gin.Params{{Key: "protocol", Value: "openai"}, {Key: "path", Value: "/chat/completions"}}
c.Request = httptest.NewRequest("POST", "/openai/chat/completions", bytes.NewReader([]byte(`{"model":"openai_p/gpt-4","messages":[{"role":"user","content":"hi"}]}`)))
c.Params = gin.Params{{Key: "protocol", Value: "openai"}, {Key: "path", Value: "/v1/chat/completions"}}
c.Request = httptest.NewRequest("POST", "/openai/v1/chat/completions", bytes.NewReader([]byte(`{"model":"openai_p/gpt-4","messages":[{"role":"user","content":"hi"}]}`)))
h.HandleProxy(c)
assert.Equal(t, 200, w.Code)
@@ -1012,8 +1012,8 @@ func TestProxyHandler_HandleProxy_CrossProtocol_NonStream_UnifiedID(t *testing.T
w := httptest.NewRecorder()
c, _ := gin.CreateTestContext(w)
c.Params = gin.Params{{Key: "protocol", Value: "openai"}, {Key: "path", Value: "/chat/completions"}}
c.Request = httptest.NewRequest("POST", "/openai/chat/completions", bytes.NewReader([]byte(`{"model":"anthropic_p/claude-3","messages":[{"role":"user","content":"hi"}]}`)))
c.Params = gin.Params{{Key: "protocol", Value: "openai"}, {Key: "path", Value: "/v1/chat/completions"}}
c.Request = httptest.NewRequest("POST", "/openai/v1/chat/completions", bytes.NewReader([]byte(`{"model":"anthropic_p/claude-3","messages":[{"role":"user","content":"hi"}]}`)))
h.HandleProxy(c)
assert.Equal(t, 200, w.Code)
@@ -1061,8 +1061,8 @@ data: {"type":"message_stop"}
w := httptest.NewRecorder()
c, _ := gin.CreateTestContext(w)
c.Params = gin.Params{{Key: "protocol", Value: "openai"}, {Key: "path", Value: "/chat/completions"}}
c.Request = httptest.NewRequest("POST", "/openai/chat/completions", bytes.NewReader([]byte(`{"model":"anthropic_p/claude-3","messages":[{"role":"user","content":"hi"}],"stream":true}`)))
c.Params = gin.Params{{Key: "protocol", Value: "openai"}, {Key: "path", Value: "/v1/chat/completions"}}
c.Request = httptest.NewRequest("POST", "/openai/v1/chat/completions", bytes.NewReader([]byte(`{"model":"anthropic_p/claude-3","messages":[{"role":"user","content":"hi"}],"stream":true}`)))
h.HandleProxy(c)
assert.Equal(t, 200, w.Code)
@@ -1099,8 +1099,8 @@ func TestProxyHandler_HandleProxy_SmartPassthrough_Fidelity(t *testing.T) {
w := httptest.NewRecorder()
c, _ := gin.CreateTestContext(w)
c.Params = gin.Params{{Key: "protocol", Value: "openai"}, {Key: "path", Value: "/chat/completions"}}
c.Request = httptest.NewRequest("POST", "/openai/chat/completions", bytes.NewReader([]byte(`{"model":"openai_p/gpt-4","messages":[{"role":"user","content":"hi"}],"custom_param":"should_be_preserved"}`)))
c.Params = gin.Params{{Key: "protocol", Value: "openai"}, {Key: "path", Value: "/v1/chat/completions"}}
c.Request = httptest.NewRequest("POST", "/openai/v1/chat/completions", bytes.NewReader([]byte(`{"model":"openai_p/gpt-4","messages":[{"role":"user","content":"hi"}],"custom_param":"should_be_preserved"}`)))
h.HandleProxy(c)
assert.Equal(t, 200, w.Code)
@@ -1130,8 +1130,8 @@ func TestProxyHandler_HandleProxy_UnifiedID_ModelNotFound(t *testing.T) {
w := httptest.NewRecorder()
c, _ := gin.CreateTestContext(w)
c.Params = gin.Params{{Key: "protocol", Value: "openai"}, {Key: "path", Value: "/chat/completions"}}
c.Request = httptest.NewRequest("POST", "/openai/chat/completions", bytes.NewReader([]byte(`{"model":"unknown/model","messages":[{"role":"user","content":"hi"}]}`)))
c.Params = gin.Params{{Key: "protocol", Value: "openai"}, {Key: "path", Value: "/v1/chat/completions"}}
c.Request = httptest.NewRequest("POST", "/openai/v1/chat/completions", bytes.NewReader([]byte(`{"model":"unknown/model","messages":[{"role":"user","content":"hi"}]}`)))
h.HandleProxy(c)
assert.Equal(t, 404, w.Code)
@@ -1154,10 +1154,10 @@ func TestProxyHandler_HandleProxy_OpenAIAndAnthropicNativePaths(t *testing.T) {
responseModel string
}{
{
name: "openai path has no v1 after gateway prefix",
name: "openai path keeps v1 after gateway prefix",
protocol: "openai",
path: "/chat/completions",
requestPath: "/openai/chat/completions",
path: "/v1/chat/completions",
requestPath: "/openai/v1/chat/completions",
baseURL: "https://api.test.com/v1",
expectedURL: "https://api.test.com/v1/chat/completions",
body: `{"model":"p1/gpt-4","messages":[{"role":"user","content":"hi"}]}`,
@@ -1240,8 +1240,8 @@ func TestProxyHandler_UpstreamNon2xx_Passthrough(t *testing.T) {
w := httptest.NewRecorder()
c, _ := gin.CreateTestContext(w)
c.Params = gin.Params{{Key: "protocol", Value: "openai"}, {Key: "path", Value: "/chat/completions"}}
c.Request = httptest.NewRequest("POST", "/openai/chat/completions", bytes.NewReader([]byte(`{"model":"p1/gpt-4","messages":[{"role":"user","content":"hi"}]}`)))
c.Params = gin.Params{{Key: "protocol", Value: "openai"}, {Key: "path", Value: "/v1/chat/completions"}}
c.Request = httptest.NewRequest("POST", "/openai/v1/chat/completions", bytes.NewReader([]byte(`{"model":"p1/gpt-4","messages":[{"role":"user","content":"hi"}]}`)))
h.HandleProxy(c)
require.Equal(t, http.StatusTooManyRequests, w.Code)
@@ -1272,8 +1272,8 @@ func TestProxyHandler_StreamUpstreamNon2xx_Passthrough(t *testing.T) {
w := httptest.NewRecorder()
c, _ := gin.CreateTestContext(w)
c.Params = gin.Params{{Key: "protocol", Value: "openai"}, {Key: "path", Value: "/chat/completions"}}
c.Request = httptest.NewRequest("POST", "/openai/chat/completions", bytes.NewReader([]byte(`{"model":"p1/gpt-4","messages":[{"role":"user","content":"hi"}],"stream":true}`)))
c.Params = gin.Params{{Key: "protocol", Value: "openai"}, {Key: "path", Value: "/v1/chat/completions"}}
c.Request = httptest.NewRequest("POST", "/openai/v1/chat/completions", bytes.NewReader([]byte(`{"model":"p1/gpt-4","messages":[{"role":"user","content":"hi"}],"stream":true}`)))
h.HandleProxy(c)
require.Equal(t, http.StatusServiceUnavailable, w.Code)
@@ -1347,8 +1347,8 @@ func TestProxyHandler_InvalidJSON_UsesGatewayError(t *testing.T) {
w := httptest.NewRecorder()
c, _ := gin.CreateTestContext(w)
c.Params = gin.Params{{Key: "protocol", Value: "openai"}, {Key: "path", Value: "/chat/completions"}}
c.Request = httptest.NewRequest("POST", "/openai/chat/completions", bytes.NewReader([]byte(`{"model":`)))
c.Params = gin.Params{{Key: "protocol", Value: "openai"}, {Key: "path", Value: "/v1/chat/completions"}}
c.Request = httptest.NewRequest("POST", "/openai/v1/chat/completions", bytes.NewReader([]byte(`{"model":`)))
h.HandleProxy(c)
require.Equal(t, http.StatusBadRequest, w.Code)
@@ -1373,8 +1373,8 @@ func TestProxyHandler_CrossProtocolMultimodal_Unsupported(t *testing.T) {
body := []byte(`{"model":"anthropic_p/claude","messages":[{"role":"user","content":[{"type":"text","text":"describe"},{"type":"image_url","image_url":{"url":"data:image/png;base64,abc"}}]}]}`)
w := httptest.NewRecorder()
c, _ := gin.CreateTestContext(w)
c.Params = gin.Params{{Key: "protocol", Value: "openai"}, {Key: "path", Value: "/chat/completions"}}
c.Request = httptest.NewRequest("POST", "/openai/chat/completions", bytes.NewReader(body))
c.Params = gin.Params{{Key: "protocol", Value: "openai"}, {Key: "path", Value: "/v1/chat/completions"}}
c.Request = httptest.NewRequest("POST", "/openai/v1/chat/completions", bytes.NewReader(body))
h.HandleProxy(c)
require.Equal(t, http.StatusBadRequest, w.Code)
@@ -1409,8 +1409,8 @@ func TestProxyHandler_SameProtocolMultimodal_SmartPassthrough(t *testing.T) {
body := []byte(`{"model":"p1/gpt-4","messages":[{"role":"user","content":[{"type":"text","text":"describe"},{"type":"image_url","image_url":{"url":"data:image/png;base64,abc"}}]}]}`)
w := httptest.NewRecorder()
c, _ := gin.CreateTestContext(w)
c.Params = gin.Params{{Key: "protocol", Value: "openai"}, {Key: "path", Value: "/chat/completions"}}
c.Request = httptest.NewRequest("POST", "/openai/chat/completions", bytes.NewReader(body))
c.Params = gin.Params{{Key: "protocol", Value: "openai"}, {Key: "path", Value: "/v1/chat/completions"}}
c.Request = httptest.NewRequest("POST", "/openai/v1/chat/completions", bytes.NewReader(body))
h.HandleProxy(c)
require.Equal(t, http.StatusOK, w.Code)
@@ -1444,8 +1444,8 @@ func TestProxyHandler_RawStreamPassthrough_PreservesSSEFrames(t *testing.T) {
w := httptest.NewRecorder()
c, _ := gin.CreateTestContext(w)
c.Params = gin.Params{{Key: "protocol", Value: "openai"}, {Key: "path", Value: "/chat/completions"}}
c.Request = httptest.NewRequest("POST", "/openai/chat/completions", bytes.NewReader([]byte(`{"model":"gpt-4","messages":[{"role":"user","content":"hi"}],"stream":true}`)))
c.Params = gin.Params{{Key: "protocol", Value: "openai"}, {Key: "path", Value: "/v1/chat/completions"}}
c.Request = httptest.NewRequest("POST", "/openai/v1/chat/completions", bytes.NewReader([]byte(`{"model":"gpt-4","messages":[{"role":"user","content":"hi"}],"stream":true}`)))
h.HandleProxy(c)
require.Equal(t, http.StatusOK, w.Code)