1
0

fix: 完善转换代理行为

This commit is contained in:
2026-04-26 21:48:17 +08:00
parent 155244433f
commit 9622d44aac
33 changed files with 1127 additions and 1117 deletions

View File

@@ -184,7 +184,7 @@ func TestConversion_OpenAIToAnthropic_NonStream(t *testing.T) {
body, _ := json.Marshal(openaiReq)
w := httptest.NewRecorder()
req := httptest.NewRequest("POST", "/openai/chat/completions", bytes.NewReader(body))
req := httptest.NewRequest("POST", "/openai/v1/chat/completions", bytes.NewReader(body))
req.Header.Set("Content-Type", "application/json")
r.ServeHTTP(w, req)
@@ -299,7 +299,7 @@ func TestConversion_OpenAIToOpenAI_Passthrough(t *testing.T) {
body, _ := json.Marshal(reqBody)
w := httptest.NewRecorder()
req := httptest.NewRequest("POST", "/openai/chat/completions", bytes.NewReader(body))
req := httptest.NewRequest("POST", "/openai/v1/chat/completions", bytes.NewReader(body))
req.Header.Set("Content-Type", "application/json")
r.ServeHTTP(w, req)
@@ -382,7 +382,7 @@ func TestConversion_OpenAIToAnthropic_Stream(t *testing.T) {
body, _ := json.Marshal(openaiReq)
w := httptest.NewRecorder()
req := httptest.NewRequest("POST", "/openai/chat/completions", bytes.NewReader(body))
req := httptest.NewRequest("POST", "/openai/v1/chat/completions", bytes.NewReader(body))
req.Header.Set("Content-Type", "application/json")
r.ServeHTTP(w, req)
@@ -505,7 +505,7 @@ func TestConversion_ErrorResponse_Format(t *testing.T) {
// OpenAI 协议格式
w := httptest.NewRecorder()
req := httptest.NewRequest("POST", "/openai/chat/completions", bytes.NewReader(body))
req := httptest.NewRequest("POST", "/openai/v1/chat/completions", bytes.NewReader(body))
req.Header.Set("Content-Type", "application/json")
r.ServeHTTP(w, req)
assert.True(t, w.Code >= 400)

View File

@@ -185,7 +185,7 @@ func TestE2E_OpenAI_NonStream_BasicText(t *testing.T) {
},
})
w := httptest.NewRecorder()
req := httptest.NewRequest("POST", "/openai/chat/completions", bytes.NewReader(body))
req := httptest.NewRequest("POST", "/openai/v1/chat/completions", bytes.NewReader(body))
req.Header.Set("Content-Type", "application/json")
r.ServeHTTP(w, req)
@@ -243,7 +243,7 @@ func TestE2E_OpenAI_NonStream_MultiTurn(t *testing.T) {
},
})
w := httptest.NewRecorder()
req := httptest.NewRequest("POST", "/openai/chat/completions", bytes.NewReader(body))
req := httptest.NewRequest("POST", "/openai/v1/chat/completions", bytes.NewReader(body))
req.Header.Set("Content-Type", "application/json")
r.ServeHTTP(w, req)
@@ -300,7 +300,7 @@ func TestE2E_OpenAI_NonStream_ToolCalls(t *testing.T) {
"tool_choice": "auto",
})
w := httptest.NewRecorder()
req := httptest.NewRequest("POST", "/openai/chat/completions", bytes.NewReader(body))
req := httptest.NewRequest("POST", "/openai/v1/chat/completions", bytes.NewReader(body))
req.Header.Set("Content-Type", "application/json")
r.ServeHTTP(w, req)
@@ -343,7 +343,7 @@ func TestE2E_OpenAI_NonStream_MaxTokens_Length(t *testing.T) {
"max_tokens": 30,
})
w := httptest.NewRecorder()
req := httptest.NewRequest("POST", "/openai/chat/completions", bytes.NewReader(body))
req := httptest.NewRequest("POST", "/openai/v1/chat/completions", bytes.NewReader(body))
req.Header.Set("Content-Type", "application/json")
r.ServeHTTP(w, req)
@@ -379,7 +379,7 @@ func TestE2E_OpenAI_NonStream_UsageWithReasoning(t *testing.T) {
"messages": []map[string]any{{"role": "user", "content": "15+23*2=?"}},
})
w := httptest.NewRecorder()
req := httptest.NewRequest("POST", "/openai/chat/completions", bytes.NewReader(body))
req := httptest.NewRequest("POST", "/openai/v1/chat/completions", bytes.NewReader(body))
req.Header.Set("Content-Type", "application/json")
r.ServeHTTP(w, req)
@@ -420,7 +420,7 @@ func TestE2E_OpenAI_NonStream_Refusal(t *testing.T) {
"messages": []map[string]any{{"role": "user", "content": "做坏事"}},
})
w := httptest.NewRecorder()
req := httptest.NewRequest("POST", "/openai/chat/completions", bytes.NewReader(body))
req := httptest.NewRequest("POST", "/openai/v1/chat/completions", bytes.NewReader(body))
req.Header.Set("Content-Type", "application/json")
r.ServeHTTP(w, req)
@@ -463,7 +463,7 @@ func TestE2E_OpenAI_Stream_Text(t *testing.T) {
"stream": true,
})
w := httptest.NewRecorder()
req := httptest.NewRequest("POST", "/openai/chat/completions", bytes.NewReader(body))
req := httptest.NewRequest("POST", "/openai/v1/chat/completions", bytes.NewReader(body))
req.Header.Set("Content-Type", "application/json")
r.ServeHTTP(w, req)
@@ -517,7 +517,7 @@ func TestE2E_OpenAI_Stream_ToolCalls(t *testing.T) {
"stream": true,
})
w := httptest.NewRecorder()
req := httptest.NewRequest("POST", "/openai/chat/completions", bytes.NewReader(body))
req := httptest.NewRequest("POST", "/openai/v1/chat/completions", bytes.NewReader(body))
req.Header.Set("Content-Type", "application/json")
r.ServeHTTP(w, req)
@@ -556,7 +556,7 @@ func TestE2E_OpenAI_Stream_WithUsage(t *testing.T) {
"stream": true,
})
w := httptest.NewRecorder()
req := httptest.NewRequest("POST", "/openai/chat/completions", bytes.NewReader(body))
req := httptest.NewRequest("POST", "/openai/v1/chat/completions", bytes.NewReader(body))
req.Header.Set("Content-Type", "application/json")
r.ServeHTTP(w, req)
@@ -980,7 +980,7 @@ func TestE2E_CrossProtocol_OpenAIToAnthropic_RequestFormat(t *testing.T) {
"messages": []map[string]any{{"role": "user", "content": "Hello"}},
})
w := httptest.NewRecorder()
req := httptest.NewRequest("POST", "/openai/chat/completions", bytes.NewReader(body))
req := httptest.NewRequest("POST", "/openai/v1/chat/completions", bytes.NewReader(body))
req.Header.Set("Content-Type", "application/json")
r.ServeHTTP(w, req)
@@ -1063,7 +1063,7 @@ func TestE2E_CrossProtocol_OpenAIToAnthropic_Stream(t *testing.T) {
"stream": true,
})
w := httptest.NewRecorder()
req := httptest.NewRequest("POST", "/openai/chat/completions", bytes.NewReader(body))
req := httptest.NewRequest("POST", "/openai/v1/chat/completions", bytes.NewReader(body))
req.Header.Set("Content-Type", "application/json")
r.ServeHTTP(w, req)
@@ -1140,7 +1140,7 @@ func TestE2E_OpenAI_ErrorResponse(t *testing.T) {
"messages": []map[string]any{{"role": "user", "content": "test"}},
})
w := httptest.NewRecorder()
req := httptest.NewRequest("POST", "/openai/chat/completions", bytes.NewReader(body))
req := httptest.NewRequest("POST", "/openai/v1/chat/completions", bytes.NewReader(body))
req.Header.Set("Content-Type", "application/json")
r.ServeHTTP(w, req)
@@ -1225,7 +1225,7 @@ func TestE2E_OpenAI_NonStream_ParallelToolCalls(t *testing.T) {
"tool_choice": "auto",
})
w := httptest.NewRecorder()
req := httptest.NewRequest("POST", "/openai/chat/completions", bytes.NewReader(body))
req := httptest.NewRequest("POST", "/openai/v1/chat/completions", bytes.NewReader(body))
req.Header.Set("Content-Type", "application/json")
r.ServeHTTP(w, req)
@@ -1266,7 +1266,7 @@ func TestE2E_OpenAI_NonStream_StopSequence(t *testing.T) {
"stop": []string{"5"},
})
w := httptest.NewRecorder()
req := httptest.NewRequest("POST", "/openai/chat/completions", bytes.NewReader(body))
req := httptest.NewRequest("POST", "/openai/v1/chat/completions", bytes.NewReader(body))
req.Header.Set("Content-Type", "application/json")
r.ServeHTTP(w, req)
@@ -1303,7 +1303,7 @@ func TestE2E_OpenAI_NonStream_ContentFilter(t *testing.T) {
"messages": []map[string]any{{"role": "user", "content": "危险内容"}},
})
w := httptest.NewRecorder()
req := httptest.NewRequest("POST", "/openai/chat/completions", bytes.NewReader(body))
req := httptest.NewRequest("POST", "/openai/v1/chat/completions", bytes.NewReader(body))
req.Header.Set("Content-Type", "application/json")
r.ServeHTTP(w, req)
@@ -1586,7 +1586,7 @@ func TestE2E_CrossProtocol_OpenAIToAnthropic_NonStream_ToolCalls(t *testing.T) {
}},
})
w := httptest.NewRecorder()
req := httptest.NewRequest("POST", "/openai/chat/completions", bytes.NewReader(body))
req := httptest.NewRequest("POST", "/openai/v1/chat/completions", bytes.NewReader(body))
req.Header.Set("Content-Type", "application/json")
r.ServeHTTP(w, req)
@@ -1657,7 +1657,7 @@ func TestE2E_CrossProtocol_StopReasonMapping(t *testing.T) {
"messages": []map[string]any{{"role": "user", "content": "长文"}},
})
w := httptest.NewRecorder()
req := httptest.NewRequest("POST", "/openai/chat/completions", bytes.NewReader(body))
req := httptest.NewRequest("POST", "/openai/v1/chat/completions", bytes.NewReader(body))
req.Header.Set("Content-Type", "application/json")
r.ServeHTTP(w, req)
@@ -1707,7 +1707,7 @@ func TestE2E_OpenAI_NonStream_AssistantWithToolResult(t *testing.T) {
},
})
w := httptest.NewRecorder()
req := httptest.NewRequest("POST", "/openai/chat/completions", bytes.NewReader(body))
req := httptest.NewRequest("POST", "/openai/v1/chat/completions", bytes.NewReader(body))
req.Header.Set("Content-Type", "application/json")
r.ServeHTTP(w, req)
@@ -1759,7 +1759,7 @@ func TestE2E_CrossProtocol_AnthropicToOpenAI_Stream_ToolCalls(t *testing.T) {
"stream": true,
})
w := httptest.NewRecorder()
req := httptest.NewRequest("POST", "/openai/chat/completions", bytes.NewReader(body))
req := httptest.NewRequest("POST", "/openai/v1/chat/completions", bytes.NewReader(body))
req.Header.Set("Content-Type", "application/json")
r.ServeHTTP(w, req)
@@ -1835,7 +1835,7 @@ func TestE2E_OpenAI_Upstream5xx_ErrorPassthrough(t *testing.T) {
"messages": []map[string]any{{"role": "user", "content": "test"}},
})
w := httptest.NewRecorder()
req := httptest.NewRequest("POST", "/openai/chat/completions", bytes.NewReader(body))
req := httptest.NewRequest("POST", "/openai/v1/chat/completions", bytes.NewReader(body))
req.Header.Set("Content-Type", "application/json")
r.ServeHTTP(w, req)
@@ -1917,6 +1917,95 @@ func TestE2E_Anthropic_Stream_TruncatedSSE(t *testing.T) {
assert.Contains(t, respBody, "正常")
}
// TestE2E_OpenAI_Models_LocalAggregation verifies that GET /openai/v1/models
// is answered by local aggregation of registered provider/model pairs: the
// single registered model comes back as "<provider>/<model>" with the
// provider recorded in owned_by.
func TestE2E_OpenAI_Models_LocalAggregation(t *testing.T) {
	r, upstream := setupE2ETest(t)
	e2eCreateProviderAndModel(t, r, "openai_p", "openai", "gpt-4o", upstream.URL)

	rec := httptest.NewRecorder()
	r.ServeHTTP(rec, httptest.NewRequest("GET", "/openai/v1/models", nil))
	assert.Equal(t, http.StatusOK, rec.Code)

	var payload map[string]any
	require.NoError(t, json.Unmarshal(rec.Body.Bytes(), &payload))

	list, ok := payload["data"].([]any)
	require.True(t, ok)
	require.Len(t, list, 1)

	entry := list[0].(map[string]any)
	assert.Equal(t, "openai_p/gpt-4o", entry["id"])
	assert.Equal(t, "openai_p", entry["owned_by"])
}
// TestE2E_OpenAI_Embeddings_SameProtocol verifies that a same-protocol
// embeddings request is proxied to the upstream's /embeddings path and that
// the response's model field is rewritten back to "<provider>/<model>".
func TestE2E_OpenAI_Embeddings_SameProtocol(t *testing.T) {
	r, upstream := setupE2ETest(t)

	// Fake upstream: check the forwarded path and reply with a canned
	// embeddings payload in OpenAI's list format.
	upstream.Config.Handler = http.HandlerFunc(func(rw http.ResponseWriter, in *http.Request) {
		assert.Equal(t, "/embeddings", in.URL.Path)
		rw.Header().Set("Content-Type", "application/json")
		payload := map[string]any{
			"object": "list",
			"data": []map[string]any{{
				"index":     0,
				"embedding": []float64{0.1, 0.2, 0.3},
			}},
			"model": "text-embedding-3-small",
			"usage": map[string]any{
				"prompt_tokens": 3,
				"total_tokens":  3,
			},
		}
		require.NoError(t, json.NewEncoder(rw).Encode(payload))
	})
	e2eCreateProviderAndModel(t, r, "openai_p", "openai", "text-embedding-3-small", upstream.URL)

	reqBody, _ := json.Marshal(map[string]any{
		"model": "openai_p/text-embedding-3-small",
		"input": "hello",
	})
	rec := httptest.NewRecorder()
	httpReq := httptest.NewRequest("POST", "/openai/v1/embeddings", bytes.NewReader(reqBody))
	httpReq.Header.Set("Content-Type", "application/json")
	r.ServeHTTP(rec, httpReq)

	assert.Equal(t, http.StatusOK, rec.Code)
	var got map[string]any
	require.NoError(t, json.Unmarshal(rec.Body.Bytes(), &got))
	assert.Equal(t, "openai_p/text-embedding-3-small", got["model"])
}
// TestE2E_OpenAI_Rerank_SameProtocol verifies that a same-protocol rerank
// request is proxied to the upstream's /rerank path, the model field comes
// back as "<provider>/<model>", and the upstream's results list is passed
// through intact.
func TestE2E_OpenAI_Rerank_SameProtocol(t *testing.T) {
	r, upstream := setupE2ETest(t)

	// Fake upstream: check the forwarded path and reply with one canned
	// rerank result.
	upstream.Config.Handler = http.HandlerFunc(func(rw http.ResponseWriter, in *http.Request) {
		assert.Equal(t, "/rerank", in.URL.Path)
		rw.Header().Set("Content-Type", "application/json")
		payload := map[string]any{
			"results": []map[string]any{{
				"index":           1,
				"relevance_score": 0.98,
				"document":        "beta",
			}},
			"model": "rerank-v1",
		}
		require.NoError(t, json.NewEncoder(rw).Encode(payload))
	})
	e2eCreateProviderAndModel(t, r, "openai_p", "openai", "rerank-v1", upstream.URL)

	reqBody, _ := json.Marshal(map[string]any{
		"model":     "openai_p/rerank-v1",
		"query":     "second",
		"documents": []string{"alpha", "beta"},
	})
	rec := httptest.NewRecorder()
	httpReq := httptest.NewRequest("POST", "/openai/v1/rerank", bytes.NewReader(reqBody))
	httpReq.Header.Set("Content-Type", "application/json")
	r.ServeHTTP(rec, httpReq)

	assert.Equal(t, http.StatusOK, rec.Code)
	var got map[string]any
	require.NoError(t, json.Unmarshal(rec.Body.Bytes(), &got))
	assert.Equal(t, "openai_p/rerank-v1", got["model"])

	results, ok := got["results"].([]any)
	require.True(t, ok)
	require.Len(t, results, 1)
}
var (
_ = fmt.Sprintf
_ = time.Now