1
0
Files
nex/backend/tests/integration/e2e_conversion_test.go
lanyuanxiaoyao bc1ee612d9 refactor: 实现 ConversionEngine 协议转换引擎,替代旧 protocol 包
- 新增 ConversionEngine 核心引擎,支持 OpenAI 和 Anthropic 协议转换
- 添加 stream decoder/encoder 实现
- 更新 provider client 支持新引擎
- 补充单元测试和集成测试
- 更新 specs 文档
2026-04-20 13:02:28 +08:00

1912 lines
76 KiB
Go
Raw Blame History

This file contains ambiguous Unicode characters
This file contains Unicode characters that might be confused with other characters. If you think that this is intentional, you can safely ignore this warning. Use the Escape button to reveal them.
package integration
import (
"bufio"
"bytes"
"encoding/json"
"fmt"
"io"
"net/http"
"net/http/httptest"
"os"
"path/filepath"
"strings"
"testing"
"time"
"github.com/gin-gonic/gin"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
"gorm.io/driver/sqlite"
"gorm.io/gorm"
"nex/backend/internal/config"
"nex/backend/internal/conversion"
"nex/backend/internal/conversion/anthropic"
openaiConv "nex/backend/internal/conversion/openai"
"nex/backend/internal/handler"
"nex/backend/internal/handler/middleware"
"nex/backend/internal/provider"
"nex/backend/internal/repository"
"nex/backend/internal/service"
)
// setupE2ETest wires a complete in-memory stack for end-to-end proxy tests:
// a gin router backed by sqlite repositories/services, the ConversionEngine
// with both OpenAI and Anthropic adapters registered, plus a mock upstream
// server. Individual tests override upstream.Config.Handler to script the
// upstream's behavior. All resources are released via t.Cleanup.
func setupE2ETest(t *testing.T) (*gin.Engine, *httptest.Server) {
	t.Helper()
	gin.SetMode(gin.TestMode)
	// Default upstream handler; tests replace it before issuing requests.
	upstream := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
		w.WriteHeader(http.StatusOK)
		w.Write([]byte(`{"error":"not mocked"}`))
	}))
	// Previously the MkdirTemp error was discarded; fail fast instead.
	dir, err := os.MkdirTemp("", "e2e-test-*")
	require.NoError(t, err)
	db, err := gorm.Open(sqlite.Open(filepath.Join(dir, "test.db")), &gorm.Config{})
	require.NoError(t, err)
	err = db.AutoMigrate(&config.Provider{}, &config.Model{}, &config.UsageStats{})
	require.NoError(t, err)
	t.Cleanup(func() {
		sqlDB, _ := db.DB()
		if sqlDB != nil {
			sqlDB.Close()
		}
		upstream.Close()
		os.RemoveAll(dir)
	})
	providerRepo := repository.NewProviderRepository(db)
	modelRepo := repository.NewModelRepository(db)
	statsRepo := repository.NewStatsRepository(db)
	providerService := service.NewProviderService(providerRepo)
	modelService := service.NewModelService(modelRepo, providerRepo)
	routingService := service.NewRoutingService(modelRepo, providerRepo)
	statsService := service.NewStatsService(statsRepo)
	// Conversion engine with both protocol adapters registered.
	registry := conversion.NewMemoryRegistry()
	require.NoError(t, registry.Register(openaiConv.NewAdapter()))
	require.NoError(t, registry.Register(anthropic.NewAdapter()))
	engine := conversion.NewConversionEngine(registry)
	providerClient := provider.NewClient()
	proxyHandler := handler.NewProxyHandler(engine, providerClient, routingService, providerService, statsService)
	providerHandler := handler.NewProviderHandler(providerService)
	modelHandler := handler.NewModelHandler(modelService)
	// Note: modelService and statsService are consumed by the handlers
	// above, so the former `_ =` blank assignments were dead code.
	r := gin.New()
	r.Use(middleware.CORS())
	r.Any("/:protocol/v1/*path", proxyHandler.HandleProxy)
	providers := r.Group("/api/providers")
	{
		providers.POST("", providerHandler.CreateProvider)
	}
	models := r.Group("/api/models")
	{
		models.POST("", modelHandler.CreateModel)
	}
	return r, upstream
}
// e2eCreateProviderAndModel registers a provider and one model through the
// admin API so subsequent proxy requests can be routed to the mock upstream.
// Both creations must return 201 or the calling test is aborted.
func e2eCreateProviderAndModel(t *testing.T, r *gin.Engine, providerID, protocol, modelName, upstreamURL string) {
	t.Helper()
	// post marshals payload, issues a JSON POST and requires a 201 reply.
	post := func(path string, payload map[string]string) {
		body, err := json.Marshal(payload)
		require.NoError(t, err)
		w := httptest.NewRecorder()
		req := httptest.NewRequest("POST", path, bytes.NewReader(body))
		req.Header.Set("Content-Type", "application/json")
		r.ServeHTTP(w, req)
		require.Equal(t, 201, w.Code)
	}
	post("/api/providers", map[string]string{
		"id": providerID, "name": providerID, "api_key": "test-key",
		"base_url": upstreamURL, "protocol": protocol,
	})
	post("/api/models", map[string]string{
		"id": modelName, "provider_id": providerID, "model_name": modelName,
	})
}
// parseSSEEvents splits an SSE response body into {event, data} pairs.
// Events are delimited by blank lines per the SSE wire format; unlike the
// previous version, a trailing event that is not followed by a terminating
// blank line (e.g. a truncated stream) is also captured instead of dropped.
func parseSSEEvents(body string) []map[string]string {
	var events []map[string]string
	scanner := bufio.NewScanner(strings.NewReader(body))
	var currentEvent, currentData string
	// flush appends the pending event (if any) and resets the accumulators.
	flush := func() {
		if currentEvent == "" && currentData == "" {
			return
		}
		events = append(events, map[string]string{
			"event": currentEvent,
			"data":  currentData,
		})
		currentEvent = ""
		currentData = ""
	}
	for scanner.Scan() {
		line := scanner.Text()
		switch {
		case strings.HasPrefix(line, "event: "):
			currentEvent = strings.TrimPrefix(line, "event: ")
		case strings.HasPrefix(line, "data: "):
			currentData = strings.TrimPrefix(line, "data: ")
		case line == "":
			flush()
		}
	}
	flush() // don't lose a final event when the body lacks a trailing blank line
	return events
}
// parseOpenAIStreamChunks extracts every "data: " payload from an OpenAI
// streaming response body, including the terminal "[DONE]" sentinel.
// The original if/else was redundant: both branches appended the payload
// verbatim (the "[DONE]" branch appended the same bytes it compared against),
// so a single append suffices.
func parseOpenAIStreamChunks(body string) []string {
	var chunks []string
	scanner := bufio.NewScanner(strings.NewReader(body))
	for scanner.Scan() {
		line := scanner.Text()
		if strings.HasPrefix(line, "data: ") {
			chunks = append(chunks, strings.TrimPrefix(line, "data: "))
		}
	}
	return chunks
}
// ============================================================
// OpenAI 非流式端到端测试
// ============================================================
// TestE2E_OpenAI_NonStream_BasicText covers the simplest OpenAI passthrough:
// one user turn, one assistant reply. It checks the forwarded upstream path
// and that object/model, message content, finish_reason and the usage
// counters all survive the conversion round-trip.
func TestE2E_OpenAI_NonStream_BasicText(t *testing.T) {
	r, upstream := setupE2ETest(t)
	// Mock upstream: fixed chat.completion response.
	upstream.Config.Handler = http.HandlerFunc(func(w http.ResponseWriter, req *http.Request) {
		// The proxy must forward to the OpenAI chat completions path.
		assert.Equal(t, "/v1/chat/completions", req.URL.Path)
		w.Header().Set("Content-Type", "application/json")
		json.NewEncoder(w).Encode(map[string]any{
			"id":      "chatcmpl-e2e-001",
			"object":  "chat.completion",
			"created": 1700000000,
			"model":   "gpt-4o",
			"choices": []map[string]any{{
				"index":         0,
				"message":       map[string]any{"role": "assistant", "content": "你好我是AI助手。"},
				"finish_reason": "stop",
				"logprobs":      nil,
			}},
			"usage": map[string]any{
				"prompt_tokens": 15, "completion_tokens": 10, "total_tokens": 25,
			},
		})
	})
	e2eCreateProviderAndModel(t, r, "openai-p", "openai", "gpt-4o", upstream.URL)
	body, _ := json.Marshal(map[string]any{
		"model": "gpt-4o",
		"messages": []map[string]any{
			{"role": "user", "content": "你好"},
		},
	})
	w := httptest.NewRecorder()
	req := httptest.NewRequest("POST", "/openai/v1/chat/completions", bytes.NewReader(body))
	req.Header.Set("Content-Type", "application/json")
	r.ServeHTTP(w, req)
	assert.Equal(t, 200, w.Code)
	var resp map[string]any
	require.NoError(t, json.Unmarshal(w.Body.Bytes(), &resp))
	assert.Equal(t, "chat.completion", resp["object"])
	assert.Equal(t, "gpt-4o", resp["model"])
	choices := resp["choices"].([]any)
	require.Len(t, choices, 1)
	choice := choices[0].(map[string]any)
	// JSON numbers decode as float64 in map[string]any.
	assert.Equal(t, float64(0), choice["index"])
	msg := choice["message"].(map[string]any)
	assert.Equal(t, "assistant", msg["role"])
	assert.Equal(t, "你好我是AI助手。", msg["content"])
	assert.Equal(t, "stop", choice["finish_reason"])
	usage := resp["usage"].(map[string]any)
	assert.Equal(t, float64(15), usage["prompt_tokens"])
	assert.Equal(t, float64(10), usage["completion_tokens"])
	assert.Equal(t, float64(25), usage["total_tokens"])
}
// TestE2E_OpenAI_NonStream_MultiTurn sends a multi-turn conversation
// (system + user + assistant + user) and checks that the history reaches
// the upstream and that the reply content is returned unchanged.
func TestE2E_OpenAI_NonStream_MultiTurn(t *testing.T) {
	r, upstream := setupE2ETest(t)
	upstream.Config.Handler = http.HandlerFunc(func(w http.ResponseWriter, req *http.Request) {
		// Inspect the forwarded request body: all turns must be present.
		body, _ := io.ReadAll(req.Body)
		var reqBody map[string]any
		json.Unmarshal(body, &reqBody)
		msgs := reqBody["messages"].([]any)
		// NOTE(review): >= 3 (not == 4) presumably tolerates engines that
		// fold the system turn into another field — confirm intent.
		assert.GreaterOrEqual(t, len(msgs), 3)
		w.Header().Set("Content-Type", "application/json")
		json.NewEncoder(w).Encode(map[string]any{
			"id": "chatcmpl-e2e-002", "object": "chat.completion", "created": 1700000001, "model": "gpt-4o",
			"choices": []map[string]any{{
				"index": 0, "message": map[string]any{"role": "assistant", "content": "Go语言的interface是隐式实现的。"},
				"finish_reason": "stop", "logprobs": nil,
			}},
			"usage": map[string]any{"prompt_tokens": 100, "completion_tokens": 20, "total_tokens": 120},
		})
	})
	e2eCreateProviderAndModel(t, r, "openai-p", "openai", "gpt-4o", upstream.URL)
	body, _ := json.Marshal(map[string]any{
		"model": "gpt-4o",
		"messages": []map[string]any{
			{"role": "system", "content": "你是编程助手"},
			{"role": "user", "content": "什么是interface?"},
			{"role": "assistant", "content": "Interface定义了一组方法签名。"},
			{"role": "user", "content": "举个例子"},
		},
	})
	w := httptest.NewRecorder()
	req := httptest.NewRequest("POST", "/openai/v1/chat/completions", bytes.NewReader(body))
	req.Header.Set("Content-Type", "application/json")
	r.ServeHTTP(w, req)
	assert.Equal(t, 200, w.Code)
	var resp map[string]any
	require.NoError(t, json.Unmarshal(w.Body.Bytes(), &resp))
	assert.Contains(t, resp["choices"].([]any)[0].(map[string]any)["message"].(map[string]any)["content"], "interface")
}
// TestE2E_OpenAI_NonStream_ToolCalls verifies a tool-call response round
// trip: null content, one function tool_call with id/name/arguments, and a
// "tool_calls" finish_reason.
func TestE2E_OpenAI_NonStream_ToolCalls(t *testing.T) {
	r, upstream := setupE2ETest(t)
	// Mock upstream: the assistant answers with a single get_weather call.
	upstream.Config.Handler = http.HandlerFunc(func(w http.ResponseWriter, req *http.Request) {
		w.Header().Set("Content-Type", "application/json")
		json.NewEncoder(w).Encode(map[string]any{
			"id": "chatcmpl-e2e-004", "object": "chat.completion", "created": 1700000003, "model": "gpt-4o",
			"choices": []map[string]any{{
				"index": 0,
				"message": map[string]any{
					"role":    "assistant",
					"content": nil,
					"tool_calls": []map[string]any{{
						"id":   "call_e2e_001",
						"type": "function",
						"function": map[string]any{
							"name":      "get_weather",
							"arguments": `{"city":"北京"}`,
						},
					}},
				},
				"finish_reason": "tool_calls",
				"logprobs":      nil,
			}},
			"usage": map[string]any{"prompt_tokens": 80, "completion_tokens": 18, "total_tokens": 98},
		})
	})
	e2eCreateProviderAndModel(t, r, "openai-p", "openai", "gpt-4o", upstream.URL)
	// Client request advertises the tool with a JSON-schema parameter spec.
	body, _ := json.Marshal(map[string]any{
		"model": "gpt-4o",
		"messages": []map[string]any{
			{"role": "user", "content": "北京天气"},
		},
		"tools": []map[string]any{{
			"type": "function",
			"function": map[string]any{
				"name": "get_weather", "description": "获取天气",
				"parameters": map[string]any{
					"type":       "object",
					"properties": map[string]any{"city": map[string]any{"type": "string"}},
					"required":   []string{"city"},
				},
			},
		}},
		"tool_choice": "auto",
	})
	w := httptest.NewRecorder()
	req := httptest.NewRequest("POST", "/openai/v1/chat/completions", bytes.NewReader(body))
	req.Header.Set("Content-Type", "application/json")
	r.ServeHTTP(w, req)
	assert.Equal(t, 200, w.Code)
	var resp map[string]any
	require.NoError(t, json.Unmarshal(w.Body.Bytes(), &resp))
	choice := resp["choices"].([]any)[0].(map[string]any)
	assert.Equal(t, "tool_calls", choice["finish_reason"])
	msg := choice["message"].(map[string]any)
	toolCalls := msg["tool_calls"].([]any)
	require.Len(t, toolCalls, 1)
	tc := toolCalls[0].(map[string]any)
	assert.Equal(t, "call_e2e_001", tc["id"])
	assert.Equal(t, "function", tc["type"])
	fn := tc["function"].(map[string]any)
	assert.Equal(t, "get_weather", fn["name"])
	// arguments is a JSON-encoded string; a substring check sidesteps
	// any re-escaping the engine may apply.
	assert.Contains(t, fn["arguments"], "北京")
}
// TestE2E_OpenAI_NonStream_MaxTokens_Length verifies that a "length"
// finish_reason reported by the upstream (max_tokens cutoff) survives
// the conversion round trip.
func TestE2E_OpenAI_NonStream_MaxTokens_Length(t *testing.T) {
	router, upstream := setupE2ETest(t)
	upstream.Config.Handler = http.HandlerFunc(func(rw http.ResponseWriter, _ *http.Request) {
		rw.Header().Set("Content-Type", "application/json")
		json.NewEncoder(rw).Encode(map[string]any{
			"id":      "chatcmpl-e2e-014",
			"object":  "chat.completion",
			"created": 1700000014,
			"model":   "gpt-4o",
			"choices": []map[string]any{{
				"index":         0,
				"message":       map[string]any{"role": "assistant", "content": "人工智能起源于1950年代..."},
				"finish_reason": "length",
				"logprobs":      nil,
			}},
			"usage": map[string]any{"prompt_tokens": 20, "completion_tokens": 30, "total_tokens": 50},
		})
	})
	e2eCreateProviderAndModel(t, router, "openai-p", "openai", "gpt-4o", upstream.URL)
	payload, _ := json.Marshal(map[string]any{
		"model":      "gpt-4o",
		"messages":   []map[string]any{{"role": "user", "content": "介绍AI历史"}},
		"max_tokens": 30,
	})
	rec := httptest.NewRecorder()
	request := httptest.NewRequest("POST", "/openai/v1/chat/completions", bytes.NewReader(payload))
	request.Header.Set("Content-Type", "application/json")
	router.ServeHTTP(rec, request)
	assert.Equal(t, 200, rec.Code)
	var parsed map[string]any
	require.NoError(t, json.Unmarshal(rec.Body.Bytes(), &parsed))
	firstChoice := parsed["choices"].([]any)[0].(map[string]any)
	assert.Equal(t, "length", firstChoice["finish_reason"])
}
// TestE2E_OpenAI_NonStream_UsageWithReasoning checks reasoning-model usage
// reporting: completion_tokens must pass through, and when the engine keeps
// completion_tokens_details, reasoning_tokens must be preserved too.
func TestE2E_OpenAI_NonStream_UsageWithReasoning(t *testing.T) {
	router, upstream := setupE2ETest(t)
	upstream.Config.Handler = http.HandlerFunc(func(rw http.ResponseWriter, _ *http.Request) {
		rw.Header().Set("Content-Type", "application/json")
		json.NewEncoder(rw).Encode(map[string]any{
			"id":      "chatcmpl-e2e-022",
			"object":  "chat.completion",
			"created": 1700000022,
			"model":   "o3",
			"choices": []map[string]any{{
				"index":         0,
				"message":       map[string]any{"role": "assistant", "content": "答案是61。"},
				"finish_reason": "stop",
				"logprobs":      nil,
			}},
			"usage": map[string]any{
				"prompt_tokens": 35, "completion_tokens": 48, "total_tokens": 83,
				"completion_tokens_details": map[string]any{"reasoning_tokens": 20},
			},
		})
	})
	e2eCreateProviderAndModel(t, router, "openai-p", "openai", "o3", upstream.URL)
	payload, _ := json.Marshal(map[string]any{
		"model":    "o3",
		"messages": []map[string]any{{"role": "user", "content": "15+23*2=?"}},
	})
	rec := httptest.NewRecorder()
	request := httptest.NewRequest("POST", "/openai/v1/chat/completions", bytes.NewReader(payload))
	request.Header.Set("Content-Type", "application/json")
	router.ServeHTTP(rec, request)
	assert.Equal(t, 200, rec.Code)
	var parsed map[string]any
	require.NoError(t, json.Unmarshal(rec.Body.Bytes(), &parsed))
	tokenUsage := parsed["usage"].(map[string]any)
	assert.Equal(t, float64(48), tokenUsage["completion_tokens"])
	// The details object is only asserted when present, mirroring the
	// original test's tolerance for engines that drop it.
	if details, ok := tokenUsage["completion_tokens_details"].(map[string]any); ok {
		assert.Equal(t, float64(20), details["reasoning_tokens"])
	}
}
// TestE2E_OpenAI_NonStream_Refusal ensures an upstream safety refusal
// (message.refusal set, content null) is passed through to the client.
func TestE2E_OpenAI_NonStream_Refusal(t *testing.T) {
	router, upstream := setupE2ETest(t)
	upstream.Config.Handler = http.HandlerFunc(func(rw http.ResponseWriter, _ *http.Request) {
		rw.Header().Set("Content-Type", "application/json")
		json.NewEncoder(rw).Encode(map[string]any{
			"id":      "chatcmpl-e2e-007",
			"object":  "chat.completion",
			"created": 1700000007,
			"model":   "gpt-4o",
			"choices": []map[string]any{{
				"index": 0,
				"message": map[string]any{
					"role":    "assistant",
					"content": nil,
					"refusal": "抱歉,我无法提供涉及危险活动的信息。",
				},
				"finish_reason": "stop",
				"logprobs":      nil,
			}},
			"usage": map[string]any{"prompt_tokens": 12, "completion_tokens": 35, "total_tokens": 47},
		})
	})
	e2eCreateProviderAndModel(t, router, "openai-p", "openai", "gpt-4o", upstream.URL)
	payload, _ := json.Marshal(map[string]any{
		"model":    "gpt-4o",
		"messages": []map[string]any{{"role": "user", "content": "做坏事"}},
	})
	rec := httptest.NewRecorder()
	request := httptest.NewRequest("POST", "/openai/v1/chat/completions", bytes.NewReader(payload))
	request.Header.Set("Content-Type", "application/json")
	router.ServeHTTP(rec, request)
	assert.Equal(t, 200, rec.Code)
	var parsed map[string]any
	require.NoError(t, json.Unmarshal(rec.Body.Bytes(), &parsed))
	message := parsed["choices"].([]any)[0].(map[string]any)["message"].(map[string]any)
	assert.NotNil(t, message["refusal"])
}
// ============================================================
// OpenAI 流式端到端测试
// ============================================================
// TestE2E_OpenAI_Stream_Text verifies basic SSE streaming passthrough:
// role chunk, two content delta chunks, a stop chunk and [DONE], each
// flushed individually so the proxy must stream rather than buffer.
func TestE2E_OpenAI_Stream_Text(t *testing.T) {
	r, upstream := setupE2ETest(t)
	upstream.Config.Handler = http.HandlerFunc(func(w http.ResponseWriter, req *http.Request) {
		w.Header().Set("Content-Type", "text/event-stream")
		w.WriteHeader(http.StatusOK)
		flusher := w.(http.Flusher)
		// Chunks mirror the OpenAI streaming wire format verbatim.
		events := []string{
			`data: {"id":"chatcmpl-stream-e2e","object":"chat.completion.chunk","created":1700000019,"model":"gpt-4o","choices":[{"index":0,"delta":{"role":"assistant","content":""},"finish_reason":null}]}`,
			`data: {"id":"chatcmpl-stream-e2e","object":"chat.completion.chunk","created":1700000019,"model":"gpt-4o","choices":[{"index":0,"delta":{"content":"你"},"finish_reason":null}]}`,
			`data: {"id":"chatcmpl-stream-e2e","object":"chat.completion.chunk","created":1700000019,"model":"gpt-4o","choices":[{"index":0,"delta":{"content":"好"},"finish_reason":null}]}`,
			`data: {"id":"chatcmpl-stream-e2e","object":"chat.completion.chunk","created":1700000019,"model":"gpt-4o","choices":[{"index":0,"delta":{},"finish_reason":"stop"}]}`,
			`data: [DONE]`,
		}
		for _, e := range events {
			fmt.Fprintf(w, "%s\n\n", e)
			flusher.Flush()
			// Small delay forces the proxy to handle chunks incrementally.
			time.Sleep(10 * time.Millisecond)
		}
	})
	e2eCreateProviderAndModel(t, r, "openai-p", "openai", "gpt-4o", upstream.URL)
	body, _ := json.Marshal(map[string]any{
		"model":    "gpt-4o",
		"messages": []map[string]any{{"role": "user", "content": "你好"}},
		"stream":   true,
	})
	w := httptest.NewRecorder()
	req := httptest.NewRequest("POST", "/openai/v1/chat/completions", bytes.NewReader(body))
	req.Header.Set("Content-Type", "application/json")
	r.ServeHTTP(w, req)
	assert.Equal(t, 200, w.Code)
	assert.Contains(t, w.Header().Get("Content-Type"), "text/event-stream")
	// Substring checks on the raw SSE body — tolerant of chunk re-framing.
	respBody := w.Body.String()
	assert.Contains(t, respBody, "chat.completion.chunk")
	assert.Contains(t, respBody, `"role":"assistant"`)
	assert.Contains(t, respBody, `"content":"你"`)
	assert.Contains(t, respBody, `"content":"好"`)
	assert.Contains(t, respBody, `"finish_reason":"stop"`)
}
// TestE2E_OpenAI_Stream_ToolCalls verifies streamed tool-call deltas pass
// through the proxy: the function-name chunk, the incremental JSON argument
// chunks, and the terminal tool_calls finish reason.
func TestE2E_OpenAI_Stream_ToolCalls(t *testing.T) {
	r, upstream := setupE2ETest(t)
	upstream.Config.Handler = http.HandlerFunc(func(w http.ResponseWriter, req *http.Request) {
		w.Header().Set("Content-Type", "text/event-stream")
		w.WriteHeader(http.StatusOK)
		flusher := w.(http.Flusher)
		// Arguments are split across chunks exactly as OpenAI emits them.
		events := []string{
			`data: {"id":"chatcmpl-stream-tc","object":"chat.completion.chunk","model":"gpt-4o","choices":[{"index":0,"delta":{"role":"assistant","content":null},"finish_reason":null}]}`,
			`data: {"id":"chatcmpl-stream-tc","object":"chat.completion.chunk","model":"gpt-4o","choices":[{"index":0,"delta":{"tool_calls":[{"index":0,"id":"call_tc1","type":"function","function":{"name":"get_weather","arguments":""}}]},"finish_reason":null}]}`,
			`data: {"id":"chatcmpl-stream-tc","object":"chat.completion.chunk","model":"gpt-4o","choices":[{"index":0,"delta":{"tool_calls":[{"index":0,"function":{"arguments":"{\"city\":"}}]},"finish_reason":null}]}`,
			`data: {"id":"chatcmpl-stream-tc","object":"chat.completion.chunk","model":"gpt-4o","choices":[{"index":0,"delta":{"tool_calls":[{"index":0,"function":{"arguments":"\"北京\"}"}}]},"finish_reason":null}]}`,
			`data: {"id":"chatcmpl-stream-tc","object":"chat.completion.chunk","model":"gpt-4o","choices":[{"index":0,"delta":{},"finish_reason":"tool_calls"}]}`,
			`data: [DONE]`,
		}
		for _, e := range events {
			fmt.Fprintf(w, "%s\n\n", e)
			flusher.Flush()
			time.Sleep(10 * time.Millisecond)
		}
	})
	e2eCreateProviderAndModel(t, r, "openai-p", "openai", "gpt-4o", upstream.URL)
	body, _ := json.Marshal(map[string]any{
		"model":    "gpt-4o",
		"messages": []map[string]any{{"role": "user", "content": "北京天气"}},
		"tools": []map[string]any{{
			"type": "function",
			"function": map[string]any{
				"name": "get_weather", "description": "获取天气",
				"parameters": map[string]any{
					"type":       "object",
					"properties": map[string]any{"city": map[string]any{"type": "string"}},
				},
			},
		}},
		"stream": true,
	})
	w := httptest.NewRecorder()
	req := httptest.NewRequest("POST", "/openai/v1/chat/completions", bytes.NewReader(body))
	req.Header.Set("Content-Type", "application/json")
	r.ServeHTTP(w, req)
	assert.Equal(t, 200, w.Code)
	respBody := w.Body.String()
	assert.Contains(t, respBody, "tool_calls")
	assert.Contains(t, respBody, "get_weather")
	// Fix: the third assertion duplicated the "tool_calls" check above;
	// assert the stream's terminal finish reason instead.
	assert.Contains(t, respBody, `"finish_reason":"tool_calls"`)
}
// TestE2E_OpenAI_Stream_WithUsage streams content followed by a final
// usage-only chunk (empty choices) and checks the content and finish
// reason survive.
// NOTE(review): the upstream emits a usage chunk but this test never
// asserts usage propagation — consider adding that check if the engine
// is expected to forward it.
func TestE2E_OpenAI_Stream_WithUsage(t *testing.T) {
	r, upstream := setupE2ETest(t)
	upstream.Config.Handler = http.HandlerFunc(func(w http.ResponseWriter, req *http.Request) {
		w.Header().Set("Content-Type", "text/event-stream")
		w.WriteHeader(http.StatusOK)
		flusher := w.(http.Flusher)
		events := []string{
			`data: {"id":"chatcmpl-stream-u","object":"chat.completion.chunk","model":"gpt-4o","choices":[{"index":0,"delta":{"role":"assistant","content":""},"finish_reason":null}]}`,
			`data: {"id":"chatcmpl-stream-u","object":"chat.completion.chunk","model":"gpt-4o","choices":[{"index":0,"delta":{"content":"Hi"},"finish_reason":null}]}`,
			`data: {"id":"chatcmpl-stream-u","object":"chat.completion.chunk","model":"gpt-4o","choices":[{"index":0,"delta":{},"finish_reason":"stop"}]}`,
			`data: {"id":"chatcmpl-stream-u","object":"chat.completion.chunk","model":"gpt-4o","choices":[],"usage":{"prompt_tokens":10,"completion_tokens":5,"total_tokens":15}}`,
			`data: [DONE]`,
		}
		for _, e := range events {
			fmt.Fprintf(w, "%s\n\n", e)
			flusher.Flush()
			time.Sleep(10 * time.Millisecond)
		}
	})
	e2eCreateProviderAndModel(t, r, "openai-p", "openai", "gpt-4o", upstream.URL)
	body, _ := json.Marshal(map[string]any{
		"model":    "gpt-4o",
		"messages": []map[string]any{{"role": "user", "content": "hi"}},
		"stream":   true,
	})
	w := httptest.NewRecorder()
	req := httptest.NewRequest("POST", "/openai/v1/chat/completions", bytes.NewReader(body))
	req.Header.Set("Content-Type", "application/json")
	r.ServeHTTP(w, req)
	assert.Equal(t, 200, w.Code)
	respBody := w.Body.String()
	assert.Contains(t, respBody, `"Hi"`)
	assert.Contains(t, respBody, `"finish_reason":"stop"`)
}
// ============================================================
// Anthropic 非流式端到端测试
// ============================================================
// TestE2E_Anthropic_NonStream_BasicText covers the simplest Anthropic
// passthrough: type/role/model, a single text block, stop_reason, and
// input/output token usage.
func TestE2E_Anthropic_NonStream_BasicText(t *testing.T) {
	router, upstream := setupE2ETest(t)
	upstream.Config.Handler = http.HandlerFunc(func(rw http.ResponseWriter, _ *http.Request) {
		rw.Header().Set("Content-Type", "application/json")
		json.NewEncoder(rw).Encode(map[string]any{
			"id":   "msg_e2e_001",
			"type": "message",
			"role": "assistant",
			"content": []map[string]any{
				{"type": "text", "text": "你好我是Claude由Anthropic开发的AI助手。"},
			},
			"model":         "claude-opus-4-7",
			"stop_reason":   "end_turn",
			"stop_sequence": nil,
			"usage":         map[string]any{"input_tokens": 15, "output_tokens": 25},
		})
	})
	e2eCreateProviderAndModel(t, router, "anthropic-p", "anthropic", "claude-opus-4-7", upstream.URL)
	payload, _ := json.Marshal(map[string]any{
		"model":      "claude-opus-4-7",
		"max_tokens": 1024,
		"messages":   []map[string]any{{"role": "user", "content": "你好"}},
	})
	rec := httptest.NewRecorder()
	request := httptest.NewRequest("POST", "/anthropic/v1/messages", bytes.NewReader(payload))
	request.Header.Set("Content-Type", "application/json")
	router.ServeHTTP(rec, request)
	assert.Equal(t, 200, rec.Code)
	var parsed map[string]any
	require.NoError(t, json.Unmarshal(rec.Body.Bytes(), &parsed))
	assert.Equal(t, "message", parsed["type"])
	assert.Equal(t, "assistant", parsed["role"])
	assert.Equal(t, "claude-opus-4-7", parsed["model"])
	assert.Equal(t, "end_turn", parsed["stop_reason"])
	blocks := parsed["content"].([]any)
	require.Len(t, blocks, 1)
	textBlock := blocks[0].(map[string]any)
	assert.Equal(t, "text", textBlock["type"])
	assert.Contains(t, textBlock["text"], "Claude")
	tokenUsage := parsed["usage"].(map[string]any)
	assert.Equal(t, float64(15), tokenUsage["input_tokens"])
	assert.Equal(t, float64(25), tokenUsage["output_tokens"])
}
// TestE2E_Anthropic_NonStream_WithSystem checks that a top-level "system"
// prompt is forwarded to the upstream request body.
func TestE2E_Anthropic_NonStream_WithSystem(t *testing.T) {
	router, upstream := setupE2ETest(t)
	upstream.Config.Handler = http.HandlerFunc(func(rw http.ResponseWriter, in *http.Request) {
		raw, _ := io.ReadAll(in.Body)
		var forwarded map[string]any
		json.Unmarshal(raw, &forwarded)
		// The system prompt must survive the conversion to the upstream.
		assert.NotNil(t, forwarded["system"])
		rw.Header().Set("Content-Type", "application/json")
		json.NewEncoder(rw).Encode(map[string]any{
			"id":            "msg_e2e_003",
			"type":          "message",
			"role":          "assistant",
			"content":       []map[string]any{{"type": "text", "text": "递归是函数调用自身。"}},
			"model":         "claude-opus-4-7",
			"stop_reason":   "end_turn",
			"stop_sequence": nil,
			"usage":         map[string]any{"input_tokens": 30, "output_tokens": 15},
		})
	})
	e2eCreateProviderAndModel(t, router, "anthropic-p", "anthropic", "claude-opus-4-7", upstream.URL)
	payload, _ := json.Marshal(map[string]any{
		"model":      "claude-opus-4-7",
		"max_tokens": 1024,
		"system":     "你是编程助手",
		"messages":   []map[string]any{{"role": "user", "content": "什么是递归?"}},
	})
	rec := httptest.NewRecorder()
	request := httptest.NewRequest("POST", "/anthropic/v1/messages", bytes.NewReader(payload))
	request.Header.Set("Content-Type", "application/json")
	router.ServeHTTP(rec, request)
	assert.Equal(t, 200, rec.Code)
}
// TestE2E_Anthropic_NonStream_ToolUse verifies a tool_use reply round trip:
// one tool_use block with id/name/input and a "tool_use" stop_reason.
func TestE2E_Anthropic_NonStream_ToolUse(t *testing.T) {
	r, upstream := setupE2ETest(t)
	// Mock upstream: assistant responds with a single get_weather tool_use.
	upstream.Config.Handler = http.HandlerFunc(func(w http.ResponseWriter, req *http.Request) {
		w.Header().Set("Content-Type", "application/json")
		json.NewEncoder(w).Encode(map[string]any{
			"id": "msg_e2e_009", "type": "message", "role": "assistant",
			"content": []map[string]any{{
				"type": "tool_use", "id": "toolu_e2e_009", "name": "get_weather",
				"input": map[string]any{"city": "北京"},
			}},
			"model": "claude-opus-4-7", "stop_reason": "tool_use", "stop_sequence": nil,
			"usage": map[string]any{"input_tokens": 180, "output_tokens": 42},
		})
	})
	e2eCreateProviderAndModel(t, r, "anthropic-p", "anthropic", "claude-opus-4-7", upstream.URL)
	// Client request declares the tool via Anthropic's input_schema shape
	// (contrast with OpenAI's nested function.parameters).
	body, _ := json.Marshal(map[string]any{
		"model": "claude-opus-4-7", "max_tokens": 1024,
		"messages": []map[string]any{{"role": "user", "content": "北京天气"}},
		"tools": []map[string]any{{
			"name": "get_weather", "description": "获取天气",
			"input_schema": map[string]any{
				"type":       "object",
				"properties": map[string]any{"city": map[string]any{"type": "string"}},
				"required":   []string{"city"},
			},
		}},
		"tool_choice": map[string]any{"type": "auto"},
	})
	w := httptest.NewRecorder()
	req := httptest.NewRequest("POST", "/anthropic/v1/messages", bytes.NewReader(body))
	req.Header.Set("Content-Type", "application/json")
	r.ServeHTTP(w, req)
	assert.Equal(t, 200, w.Code)
	var resp map[string]any
	require.NoError(t, json.Unmarshal(w.Body.Bytes(), &resp))
	assert.Equal(t, "tool_use", resp["stop_reason"])
	content := resp["content"].([]any)
	require.Len(t, content, 1)
	block := content[0].(map[string]any)
	assert.Equal(t, "tool_use", block["type"])
	assert.Equal(t, "toolu_e2e_009", block["id"])
	assert.Equal(t, "get_weather", block["name"])
}
// TestE2E_Anthropic_NonStream_Thinking verifies extended-thinking output:
// a thinking block followed by a text block, preserved in order.
func TestE2E_Anthropic_NonStream_Thinking(t *testing.T) {
	router, upstream := setupE2ETest(t)
	upstream.Config.Handler = http.HandlerFunc(func(rw http.ResponseWriter, _ *http.Request) {
		rw.Header().Set("Content-Type", "application/json")
		json.NewEncoder(rw).Encode(map[string]any{
			"id":   "msg_e2e_018",
			"type": "message",
			"role": "assistant",
			"content": []map[string]any{
				{"type": "thinking", "thinking": "这是一个逻辑推理问题..."},
				{"type": "text", "text": "答案是61。"},
			},
			"model":         "claude-opus-4-7",
			"stop_reason":   "end_turn",
			"stop_sequence": nil,
			"usage":         map[string]any{"input_tokens": 95, "output_tokens": 280},
		})
	})
	e2eCreateProviderAndModel(t, router, "anthropic-p", "anthropic", "claude-opus-4-7", upstream.URL)
	payload, _ := json.Marshal(map[string]any{
		"model":      "claude-opus-4-7",
		"max_tokens": 4096,
		"messages":   []map[string]any{{"role": "user", "content": "15+23*2=?"}},
		"thinking":   map[string]any{"type": "enabled", "budget_tokens": 2048},
	})
	rec := httptest.NewRecorder()
	request := httptest.NewRequest("POST", "/anthropic/v1/messages", bytes.NewReader(payload))
	request.Header.Set("Content-Type", "application/json")
	router.ServeHTTP(rec, request)
	assert.Equal(t, 200, rec.Code)
	var parsed map[string]any
	require.NoError(t, json.Unmarshal(rec.Body.Bytes(), &parsed))
	blocks := parsed["content"].([]any)
	require.Len(t, blocks, 2)
	// Order matters: the thinking block precedes the visible answer.
	assert.Equal(t, "thinking", blocks[0].(map[string]any)["type"])
	assert.Equal(t, "text", blocks[1].(map[string]any)["type"])
}
// TestE2E_Anthropic_NonStream_MaxTokens verifies that an upstream
// "max_tokens" stop_reason is returned to the client unchanged.
func TestE2E_Anthropic_NonStream_MaxTokens(t *testing.T) {
	router, upstream := setupE2ETest(t)
	upstream.Config.Handler = http.HandlerFunc(func(rw http.ResponseWriter, _ *http.Request) {
		rw.Header().Set("Content-Type", "application/json")
		json.NewEncoder(rw).Encode(map[string]any{
			"id":            "msg_e2e_016",
			"type":          "message",
			"role":          "assistant",
			"content":       []map[string]any{{"type": "text", "text": "人工智能起源于..."}},
			"model":         "claude-opus-4-7",
			"stop_reason":   "max_tokens",
			"stop_sequence": nil,
			"usage":         map[string]any{"input_tokens": 22, "output_tokens": 20},
		})
	})
	e2eCreateProviderAndModel(t, router, "anthropic-p", "anthropic", "claude-opus-4-7", upstream.URL)
	payload, _ := json.Marshal(map[string]any{
		"model":      "claude-opus-4-7",
		"max_tokens": 20,
		"messages":   []map[string]any{{"role": "user", "content": "介绍AI历史"}},
	})
	rec := httptest.NewRecorder()
	request := httptest.NewRequest("POST", "/anthropic/v1/messages", bytes.NewReader(payload))
	request.Header.Set("Content-Type", "application/json")
	router.ServeHTTP(rec, request)
	assert.Equal(t, 200, rec.Code)
	var parsed map[string]any
	require.NoError(t, json.Unmarshal(rec.Body.Bytes(), &parsed))
	assert.Equal(t, "max_tokens", parsed["stop_reason"])
}
// TestE2E_Anthropic_NonStream_StopSequence verifies that both the
// "stop_sequence" stop_reason and the matched sequence itself survive
// the round trip.
func TestE2E_Anthropic_NonStream_StopSequence(t *testing.T) {
	router, upstream := setupE2ETest(t)
	upstream.Config.Handler = http.HandlerFunc(func(rw http.ResponseWriter, _ *http.Request) {
		rw.Header().Set("Content-Type", "application/json")
		json.NewEncoder(rw).Encode(map[string]any{
			"id":            "msg_e2e_017",
			"type":          "message",
			"role":          "assistant",
			"content":       []map[string]any{{"type": "text", "text": "1\n2\n3\n4\n"}},
			"model":         "claude-opus-4-7",
			"stop_reason":   "stop_sequence",
			"stop_sequence": "5",
			"usage":         map[string]any{"input_tokens": 22, "output_tokens": 10},
		})
	})
	e2eCreateProviderAndModel(t, router, "anthropic-p", "anthropic", "claude-opus-4-7", upstream.URL)
	payload, _ := json.Marshal(map[string]any{
		"model":          "claude-opus-4-7",
		"max_tokens":     1024,
		"messages":       []map[string]any{{"role": "user", "content": "从1数到10"}},
		"stop_sequences": []string{"5"},
	})
	rec := httptest.NewRecorder()
	request := httptest.NewRequest("POST", "/anthropic/v1/messages", bytes.NewReader(payload))
	request.Header.Set("Content-Type", "application/json")
	router.ServeHTTP(rec, request)
	assert.Equal(t, 200, rec.Code)
	var parsed map[string]any
	require.NoError(t, json.Unmarshal(rec.Body.Bytes(), &parsed))
	assert.Equal(t, "stop_sequence", parsed["stop_reason"])
	assert.Equal(t, "5", parsed["stop_sequence"])
}
// TestE2E_Anthropic_NonStream_MetadataUserID checks that the client's
// metadata.user_id field is forwarded to the upstream request.
func TestE2E_Anthropic_NonStream_MetadataUserID(t *testing.T) {
	router, upstream := setupE2ETest(t)
	upstream.Config.Handler = http.HandlerFunc(func(rw http.ResponseWriter, in *http.Request) {
		raw, _ := io.ReadAll(in.Body)
		var forwarded map[string]any
		json.Unmarshal(raw, &forwarded)
		// metadata must arrive at the upstream with the user_id intact.
		meta, _ := forwarded["metadata"].(map[string]any)
		assert.Equal(t, "user_12345", meta["user_id"])
		rw.Header().Set("Content-Type", "application/json")
		json.NewEncoder(rw).Encode(map[string]any{
			"id":            "msg_e2e_026",
			"type":          "message",
			"role":          "assistant",
			"content":       []map[string]any{{"type": "text", "text": "你好!"}},
			"model":         "claude-opus-4-7",
			"stop_reason":   "end_turn",
			"stop_sequence": nil,
			"usage":         map[string]any{"input_tokens": 12, "output_tokens": 5},
		})
	})
	e2eCreateProviderAndModel(t, router, "anthropic-p", "anthropic", "claude-opus-4-7", upstream.URL)
	payload, _ := json.Marshal(map[string]any{
		"model":      "claude-opus-4-7",
		"max_tokens": 1024,
		"messages":   []map[string]any{{"role": "user", "content": "你好"}},
		"metadata":   map[string]any{"user_id": "user_12345"},
	})
	rec := httptest.NewRecorder()
	request := httptest.NewRequest("POST", "/anthropic/v1/messages", bytes.NewReader(payload))
	request.Header.Set("Content-Type", "application/json")
	router.ServeHTTP(rec, request)
	assert.Equal(t, 200, rec.Code)
}
// TestE2E_Anthropic_NonStream_UsageWithCache verifies that prompt-cache
// usage counters (cache_creation_input_tokens) reported by the upstream
// are preserved in the response.
func TestE2E_Anthropic_NonStream_UsageWithCache(t *testing.T) {
	r, upstream := setupE2ETest(t)
	upstream.Config.Handler = http.HandlerFunc(func(w http.ResponseWriter, req *http.Request) {
		w.Header().Set("Content-Type", "application/json")
		json.NewEncoder(w).Encode(map[string]any{
			"id": "msg_e2e_025", "type": "message", "role": "assistant",
			"content": []map[string]any{{"type": "text", "text": "你好!"}},
			"model": "claude-opus-4-7", "stop_reason": "end_turn", "stop_sequence": nil,
			"usage": map[string]any{
				"input_tokens": 25, "output_tokens": 5,
				"cache_creation_input_tokens": 15, "cache_read_input_tokens": 0,
			},
		})
	})
	e2eCreateProviderAndModel(t, r, "anthropic-p", "anthropic", "claude-opus-4-7", upstream.URL)
	// System is sent as a content-block array (the form used with
	// cache_control in real requests).
	body, _ := json.Marshal(map[string]any{
		"model": "claude-opus-4-7", "max_tokens": 1024,
		"system":   []map[string]any{{"type": "text", "text": "你是编程助手。"}},
		"messages": []map[string]any{{"role": "user", "content": "你好"}},
	})
	w := httptest.NewRecorder()
	req := httptest.NewRequest("POST", "/anthropic/v1/messages", bytes.NewReader(body))
	req.Header.Set("Content-Type", "application/json")
	r.ServeHTTP(w, req)
	assert.Equal(t, 200, w.Code)
	var resp map[string]any
	require.NoError(t, json.Unmarshal(w.Body.Bytes(), &resp))
	usage := resp["usage"].(map[string]any)
	assert.Equal(t, float64(15), usage["cache_creation_input_tokens"])
}
// ============================================================
// Anthropic 流式端到端测试
// ============================================================
// TestE2E_Anthropic_Stream_Text verifies that a streaming /v1/messages
// request is proxied as SSE and that the text deltas and lifecycle events
// emitted by the upstream reach the client.
func TestE2E_Anthropic_Stream_Text(t *testing.T) {
	router, upstream := setupE2ETest(t)
	upstream.Config.Handler = http.HandlerFunc(func(w http.ResponseWriter, req *http.Request) {
		w.Header().Set("Content-Type", "text/event-stream")
		w.WriteHeader(http.StatusOK)
		flusher := w.(http.Flusher)
		// Scripted upstream SSE stream: one text block producing "你好".
		frames := []string{
			`event: message_start` + "\n" + `data: {"type":"message_start","message":{"id":"msg_stream_e2e","role":"assistant","content":[],"model":"claude-opus-4-7","usage":{"input_tokens":10,"output_tokens":0}}}`,
			`event: content_block_start` + "\n" + `data: {"type":"content_block_start","index":0,"content_block":{"type":"text","text":""}}`,
			`event: content_block_delta` + "\n" + `data: {"type":"content_block_delta","index":0,"delta":{"type":"text_delta","text":"你"}}`,
			`event: content_block_delta` + "\n" + `data: {"type":"content_block_delta","index":0,"delta":{"type":"text_delta","text":"好"}}`,
			`event: content_block_stop` + "\n" + `data: {"type":"content_block_stop","index":0}`,
			`event: message_delta` + "\n" + `data: {"type":"message_delta","delta":{"stop_reason":"end_turn","stop_sequence":null},"usage":{"output_tokens":5}}`,
			`event: message_stop` + "\n" + `data: {"type":"message_stop"}`,
		}
		for _, frame := range frames {
			// Each SSE frame ends with a blank line; flush so frames arrive
			// incrementally instead of in a single burst.
			fmt.Fprintf(w, "%s\n\n", frame)
			flusher.Flush()
			time.Sleep(10 * time.Millisecond)
		}
	})
	e2eCreateProviderAndModel(t, router, "anthropic-p", "anthropic", "claude-opus-4-7", upstream.URL)
	payload, _ := json.Marshal(map[string]any{
		"model":      "claude-opus-4-7",
		"max_tokens": 1024,
		"messages":   []map[string]any{{"role": "user", "content": "你好"}},
		"stream":     true,
	})
	rec := httptest.NewRecorder()
	req := httptest.NewRequest("POST", "/anthropic/v1/messages", bytes.NewReader(payload))
	req.Header.Set("Content-Type", "application/json")
	router.ServeHTTP(rec, req)
	assert.Equal(t, 200, rec.Code)
	assert.Contains(t, rec.Header().Get("Content-Type"), "text/event-stream")
	got := rec.Body.String()
	// Lifecycle events and text payload must all be present downstream.
	for _, fragment := range []string{"message_start", "content_block_delta", "text_delta", "你", "message_stop"} {
		assert.Contains(t, got, fragment)
	}
}
// TestE2E_Anthropic_Stream_Thinking verifies that a streaming /v1/messages
// request with extended thinking enabled carries both thinking_delta and
// text_delta SSE events through to the client.
func TestE2E_Anthropic_Stream_Thinking(t *testing.T) {
	r, upstream := setupE2ETest(t)
	upstream.Config.Handler = http.HandlerFunc(func(w http.ResponseWriter, req *http.Request) {
		w.Header().Set("Content-Type", "text/event-stream")
		w.WriteHeader(http.StatusOK)
		flusher := w.(http.Flusher)
		// Scripted upstream SSE stream: a thinking block (index 0) followed
		// by a text block (index 1), then the closing delta/stop events.
		events := []string{
			`event: message_start` + "\n" + `data: {"type":"message_start","message":{"id":"msg_stream_think","role":"assistant","model":"claude-opus-4-7","usage":{"input_tokens":30,"output_tokens":0}}}`,
			`event: content_block_start` + "\n" + `data: {"type":"content_block_start","index":0,"content_block":{"type":"thinking","thinking":""}}`,
			`event: content_block_delta` + "\n" + `data: {"type":"content_block_delta","index":0,"delta":{"type":"thinking_delta","thinking":"计算中..."}}`,
			`event: content_block_stop` + "\n" + `data: {"type":"content_block_stop","index":0}`,
			`event: content_block_start` + "\n" + `data: {"type":"content_block_start","index":1,"content_block":{"type":"text","text":""}}`,
			`event: content_block_delta` + "\n" + `data: {"type":"content_block_delta","index":1,"delta":{"type":"text_delta","text":"答案是2。"}}`,
			`event: content_block_stop` + "\n" + `data: {"type":"content_block_stop","index":1}`,
			`event: message_delta` + "\n" + `data: {"type":"message_delta","delta":{"stop_reason":"end_turn"},"usage":{"output_tokens":25}}`,
			`event: message_stop` + "\n" + `data: {"type":"message_stop"}`,
		}
		for _, e := range events {
			// Each SSE frame is terminated by a blank line; flush so the
			// client receives the frames incrementally.
			fmt.Fprintf(w, "%s\n\n", e)
			flusher.Flush()
			time.Sleep(10 * time.Millisecond)
		}
	})
	e2eCreateProviderAndModel(t, r, "anthropic-p", "anthropic", "claude-opus-4-7", upstream.URL)
	// Request enables extended thinking with a token budget.
	body, _ := json.Marshal(map[string]any{
		"model": "claude-opus-4-7", "max_tokens": 4096,
		"messages": []map[string]any{{"role": "user", "content": "1+1=?"}},
		"thinking": map[string]any{"type": "enabled", "budget_tokens": 1024},
		"stream":   true,
	})
	w := httptest.NewRecorder()
	req := httptest.NewRequest("POST", "/anthropic/v1/messages", bytes.NewReader(body))
	req.Header.Set("Content-Type", "application/json")
	r.ServeHTTP(w, req)
	assert.Equal(t, 200, w.Code)
	respBody := w.Body.String()
	// Both the thinking stream and the final text must be forwarded.
	assert.Contains(t, respBody, "thinking_delta")
	assert.Contains(t, respBody, "计算中...")
	assert.Contains(t, respBody, "text_delta")
	assert.Contains(t, respBody, "答案是2。")
}
// ============================================================
// 跨协议转换测试
// ============================================================
// TestE2E_CrossProtocol_OpenAIToAnthropic_RequestFormat verifies that an
// OpenAI /chat/completions request routed to an Anthropic provider is
// converted into a valid upstream /v1/messages request (model, max_tokens
// and messages present), and that the Anthropic response is converted back
// into an OpenAI chat.completion object.
func TestE2E_CrossProtocol_OpenAIToAnthropic_RequestFormat(t *testing.T) {
	r, upstream := setupE2ETest(t)
	upstream.Config.Handler = http.HandlerFunc(func(w http.ResponseWriter, req *http.Request) {
		// This handler runs on the HTTP server goroutine, not the test
		// goroutine, so it must use assert (Fail) instead of require
		// (FailNow): FailNow is only valid on the goroutine running the test.
		assert.Equal(t, "/v1/messages", req.URL.Path)
		body, _ := io.ReadAll(req.Body)
		var reqBody map[string]any
		if !assert.NoError(t, json.Unmarshal(body, &reqBody)) {
			w.WriteHeader(http.StatusBadRequest)
			return
		}
		assert.Equal(t, "claude-model", reqBody["model"])
		// max_tokens is mandatory in the Anthropic protocol; the converter
		// must synthesize it even though the OpenAI request omitted it.
		assert.NotNil(t, reqBody["max_tokens"])
		msgs, _ := reqBody["messages"].([]any)
		// Guard the index access so a missing/short messages array fails the
		// assertion instead of panicking inside the handler goroutine.
		if assert.GreaterOrEqual(t, len(msgs), 1) {
			firstMsg, _ := msgs[0].(map[string]any)
			assert.Equal(t, "user", firstMsg["role"])
		}
		w.Header().Set("Content-Type", "application/json")
		json.NewEncoder(w).Encode(map[string]any{
			"id": "msg_cross_001", "type": "message", "role": "assistant",
			"content": []map[string]any{{"type": "text", "text": "跨协议响应"}},
			"model": "claude-model", "stop_reason": "end_turn", "stop_sequence": nil,
			"usage": map[string]any{"input_tokens": 10, "output_tokens": 5},
		})
	})
	e2eCreateProviderAndModel(t, r, "anthropic-p", "anthropic", "claude-model", upstream.URL)
	body, _ := json.Marshal(map[string]any{
		"model":    "claude-model",
		"messages": []map[string]any{{"role": "user", "content": "Hello"}},
	})
	w := httptest.NewRecorder()
	req := httptest.NewRequest("POST", "/openai/v1/chat/completions", bytes.NewReader(body))
	req.Header.Set("Content-Type", "application/json")
	r.ServeHTTP(w, req)
	assert.Equal(t, 200, w.Code)
	var resp map[string]any
	require.NoError(t, json.Unmarshal(w.Body.Bytes(), &resp))
	assert.Equal(t, "chat.completion", resp["object"])
	msg := resp["choices"].([]any)[0].(map[string]any)["message"].(map[string]any)
	assert.Contains(t, msg["content"], "跨协议响应")
}
// TestE2E_CrossProtocol_AnthropicToOpenAI_RequestFormat verifies the reverse
// direction: an Anthropic /v1/messages request routed to an OpenAI provider
// is converted into a /v1/chat/completions upstream request, and the OpenAI
// response comes back as an Anthropic message object.
func TestE2E_CrossProtocol_AnthropicToOpenAI_RequestFormat(t *testing.T) {
	r, upstream := setupE2ETest(t)
	upstream.Config.Handler = http.HandlerFunc(func(w http.ResponseWriter, req *http.Request) {
		// Runs on the server goroutine: use assert, never require — FailNow
		// must only be called from the test goroutine.
		assert.Equal(t, "/v1/chat/completions", req.URL.Path)
		body, _ := io.ReadAll(req.Body)
		var reqBody map[string]any
		if !assert.NoError(t, json.Unmarshal(body, &reqBody)) {
			w.WriteHeader(http.StatusBadRequest)
			return
		}
		assert.Equal(t, "gpt-4", reqBody["model"])
		msgs, _ := reqBody["messages"].([]any)
		assert.GreaterOrEqual(t, len(msgs), 1)
		w.Header().Set("Content-Type", "application/json")
		json.NewEncoder(w).Encode(map[string]any{
			"id": "chatcmpl-cross", "object": "chat.completion", "model": "gpt-4", "created": time.Now().Unix(),
			"choices": []map[string]any{{
				"index": 0, "message": map[string]any{"role": "assistant", "content": "跨协议反向响应"},
				"finish_reason": "stop",
			}},
			"usage": map[string]any{"prompt_tokens": 10, "completion_tokens": 8, "total_tokens": 18},
		})
	})
	e2eCreateProviderAndModel(t, r, "openai-p", "openai", "gpt-4", upstream.URL)
	body, _ := json.Marshal(map[string]any{
		"model": "gpt-4", "max_tokens": 1024,
		"messages": []map[string]any{{"role": "user", "content": "Hello"}},
	})
	w := httptest.NewRecorder()
	req := httptest.NewRequest("POST", "/anthropic/v1/messages", bytes.NewReader(body))
	req.Header.Set("Content-Type", "application/json")
	r.ServeHTTP(w, req)
	assert.Equal(t, 200, w.Code)
	var resp map[string]any
	require.NoError(t, json.Unmarshal(w.Body.Bytes(), &resp))
	assert.Equal(t, "message", resp["type"])
	content := resp["content"].([]any)
	assert.Contains(t, content[0].(map[string]any)["text"], "跨协议反向响应")
}
// TestE2E_CrossProtocol_OpenAIToAnthropic_Stream verifies that a streaming
// OpenAI /chat/completions request routed to an Anthropic provider is sent
// upstream as /v1/messages, and that the Anthropic SSE events are converted
// into OpenAI chat.completion.chunk frames terminated by [DONE].
func TestE2E_CrossProtocol_OpenAIToAnthropic_Stream(t *testing.T) {
	r, upstream := setupE2ETest(t)
	upstream.Config.Handler = http.HandlerFunc(func(w http.ResponseWriter, req *http.Request) {
		// assert (not require) is safe on the server goroutine.
		assert.Equal(t, "/v1/messages", req.URL.Path)
		w.Header().Set("Content-Type", "text/event-stream")
		w.WriteHeader(http.StatusOK)
		flusher := w.(http.Flusher)
		// Scripted upstream Anthropic SSE stream producing "Hi".
		events := []string{
			`event: message_start` + "\n" + `data: {"type":"message_start","message":{"id":"msg_cross_stream","model":"claude-model","usage":{"input_tokens":10,"output_tokens":0}}}`,
			`event: content_block_start` + "\n" + `data: {"type":"content_block_start","index":0,"content_block":{"type":"text","text":""}}`,
			`event: content_block_delta` + "\n" + `data: {"type":"content_block_delta","index":0,"delta":{"type":"text_delta","text":"Hi"}}`,
			`event: content_block_stop` + "\n" + `data: {"type":"content_block_stop","index":0}`,
			`event: message_delta` + "\n" + `data: {"type":"message_delta","delta":{"stop_reason":"end_turn"},"usage":{"output_tokens":5}}`,
			`event: message_stop` + "\n" + `data: {"type":"message_stop"}`,
		}
		for _, e := range events {
			// Blank line terminates each SSE frame; flush after each so the
			// converter sees them incrementally.
			fmt.Fprintf(w, "%s\n\n", e)
			flusher.Flush()
			time.Sleep(10 * time.Millisecond)
		}
	})
	e2eCreateProviderAndModel(t, r, "anthropic-p", "anthropic", "claude-model", upstream.URL)
	body, _ := json.Marshal(map[string]any{
		"model":    "claude-model",
		"messages": []map[string]any{{"role": "user", "content": "Hello"}},
		"stream":   true,
	})
	w := httptest.NewRecorder()
	req := httptest.NewRequest("POST", "/openai/v1/chat/completions", bytes.NewReader(body))
	req.Header.Set("Content-Type", "application/json")
	r.ServeHTTP(w, req)
	assert.Equal(t, 200, w.Code)
	assert.Contains(t, w.Header().Get("Content-Type"), "text/event-stream")
	respBody := w.Body.String()
	// Converted output must use OpenAI chunk framing and the OpenAI
	// stream terminator.
	assert.Contains(t, respBody, "chat.completion.chunk")
	assert.Contains(t, respBody, "Hi")
	assert.Contains(t, respBody, "[DONE]")
}
// TestE2E_CrossProtocol_AnthropicToOpenAI_Stream verifies the reverse
// streaming direction: an Anthropic /v1/messages stream request routed to an
// OpenAI provider hits /v1/chat/completions upstream, and the OpenAI chunk
// stream is converted back into Anthropic SSE events.
func TestE2E_CrossProtocol_AnthropicToOpenAI_Stream(t *testing.T) {
	r, upstream := setupE2ETest(t)
	upstream.Config.Handler = http.HandlerFunc(func(w http.ResponseWriter, req *http.Request) {
		// assert (not require) is safe on the server goroutine.
		assert.Equal(t, "/v1/chat/completions", req.URL.Path)
		w.Header().Set("Content-Type", "text/event-stream")
		w.WriteHeader(http.StatusOK)
		flusher := w.(http.Flusher)
		// Scripted upstream OpenAI chunk stream producing "Hey".
		events := []string{
			`data: {"id":"chatcmpl-cross-s","object":"chat.completion.chunk","model":"gpt-4","choices":[{"index":0,"delta":{"role":"assistant"}}]}`,
			`data: {"id":"chatcmpl-cross-s","object":"chat.completion.chunk","model":"gpt-4","choices":[{"index":0,"delta":{"content":"Hey"}}]}`,
			`data: {"id":"chatcmpl-cross-s","object":"chat.completion.chunk","model":"gpt-4","choices":[{"index":0,"delta":{},"finish_reason":"stop"}]}`,
			`data: [DONE]`,
		}
		for _, e := range events {
			// Blank line terminates each SSE frame; flush so frames arrive
			// incrementally.
			fmt.Fprintf(w, "%s\n\n", e)
			flusher.Flush()
			time.Sleep(10 * time.Millisecond)
		}
	})
	e2eCreateProviderAndModel(t, r, "openai-p", "openai", "gpt-4", upstream.URL)
	body, _ := json.Marshal(map[string]any{
		"model": "gpt-4", "max_tokens": 1024,
		"messages": []map[string]any{{"role": "user", "content": "Hello"}},
		"stream":   true,
	})
	w := httptest.NewRecorder()
	req := httptest.NewRequest("POST", "/anthropic/v1/messages", bytes.NewReader(body))
	req.Header.Set("Content-Type", "application/json")
	r.ServeHTTP(w, req)
	assert.Equal(t, 200, w.Code)
	respBody := w.Body.String()
	// Converted output must use Anthropic SSE event types.
	assert.Contains(t, respBody, "content_block_delta")
	assert.Contains(t, respBody, "text_delta")
	assert.Contains(t, respBody, "Hey")
	assert.Contains(t, respBody, "message_stop")
}
// ============================================================
// 错误格式测试
// ============================================================
// TestE2E_OpenAI_ErrorResponse verifies that an upstream OpenAI 404 error
// payload results in an error status (>= 400) for the client.
func TestE2E_OpenAI_ErrorResponse(t *testing.T) {
	router, upstream := setupE2ETest(t)
	upstream.Config.Handler = http.HandlerFunc(func(w http.ResponseWriter, req *http.Request) {
		w.Header().Set("Content-Type", "application/json")
		w.WriteHeader(http.StatusNotFound)
		// Standard OpenAI error envelope for an unknown model.
		json.NewEncoder(w).Encode(map[string]any{
			"error": map[string]any{
				"message": "The model `nonexistent` does not exist.",
				"type":    "invalid_request_error",
				"param":   nil,
				"code":    "model_not_found",
			},
		})
	})
	e2eCreateProviderAndModel(t, router, "openai-p", "openai", "nonexistent", upstream.URL)
	payload, _ := json.Marshal(map[string]any{
		"model":    "nonexistent",
		"messages": []map[string]any{{"role": "user", "content": "test"}},
	})
	rec := httptest.NewRecorder()
	req := httptest.NewRequest("POST", "/openai/v1/chat/completions", bytes.NewReader(payload))
	req.Header.Set("Content-Type", "application/json")
	router.ServeHTTP(rec, req)
	// The gateway must propagate an error status, not mask it as success.
	assert.GreaterOrEqual(t, rec.Code, 400)
}
// TestE2E_Anthropic_ErrorResponse verifies that an upstream Anthropic 400
// error envelope results in an error status (>= 400) for the client.
func TestE2E_Anthropic_ErrorResponse(t *testing.T) {
	router, upstream := setupE2ETest(t)
	upstream.Config.Handler = http.HandlerFunc(func(w http.ResponseWriter, req *http.Request) {
		w.Header().Set("Content-Type", "application/json")
		w.WriteHeader(http.StatusBadRequest)
		// Standard Anthropic error envelope.
		json.NewEncoder(w).Encode(map[string]any{
			"type": "error",
			"error": map[string]any{
				"type":    "invalid_request_error",
				"message": "max_tokens is required",
			},
		})
	})
	e2eCreateProviderAndModel(t, router, "anthropic-p", "anthropic", "claude-opus-4-7", upstream.URL)
	payload, _ := json.Marshal(map[string]any{
		"model":      "claude-opus-4-7",
		"max_tokens": 1024,
		"messages":   []map[string]any{{"role": "user", "content": "test"}},
	})
	rec := httptest.NewRecorder()
	req := httptest.NewRequest("POST", "/anthropic/v1/messages", bytes.NewReader(payload))
	req.Header.Set("Content-Type", "application/json")
	router.ServeHTTP(rec, req)
	// The gateway must propagate an error status, not mask it as success.
	assert.GreaterOrEqual(t, rec.Code, 400)
}
// ============================================================
// 补充场景测试
// ============================================================
// TestE2E_OpenAI_NonStream_ParallelToolCalls verifies that a non-streaming
// response carrying two parallel tool calls is passed through with both
// call IDs and argument payloads intact.
func TestE2E_OpenAI_NonStream_ParallelToolCalls(t *testing.T) {
	router, upstream := setupE2ETest(t)
	upstream.Config.Handler = http.HandlerFunc(func(w http.ResponseWriter, req *http.Request) {
		w.Header().Set("Content-Type", "application/json")
		// Upstream answers with two get_weather calls in a single turn.
		json.NewEncoder(w).Encode(map[string]any{
			"id":      "chatcmpl-e2e-ptc",
			"object":  "chat.completion",
			"created": 1700000050,
			"model":   "gpt-4o",
			"choices": []map[string]any{{
				"index": 0,
				"message": map[string]any{
					"role":    "assistant",
					"content": nil,
					"tool_calls": []map[string]any{
						{
							"id": "call_ptc_1", "type": "function",
							"function": map[string]any{"name": "get_weather", "arguments": `{"city":"北京"}`},
						},
						{
							"id": "call_ptc_2", "type": "function",
							"function": map[string]any{"name": "get_weather", "arguments": `{"city":"上海"}`},
						},
					},
				},
				"finish_reason": "tool_calls",
				"logprobs":      nil,
			}},
			"usage": map[string]any{"prompt_tokens": 100, "completion_tokens": 36, "total_tokens": 136},
		})
	})
	e2eCreateProviderAndModel(t, router, "openai-p", "openai", "gpt-4o", upstream.URL)
	payload, _ := json.Marshal(map[string]any{
		"model":    "gpt-4o",
		"messages": []map[string]any{{"role": "user", "content": "北京和上海的天气"}},
		"tools": []map[string]any{{
			"type": "function",
			"function": map[string]any{
				"name": "get_weather", "description": "获取天气",
				"parameters": map[string]any{
					"type":       "object",
					"properties": map[string]any{"city": map[string]any{"type": "string"}},
					"required":   []string{"city"},
				},
			},
		}},
		"tool_choice": "auto",
	})
	rec := httptest.NewRecorder()
	req := httptest.NewRequest("POST", "/openai/v1/chat/completions", bytes.NewReader(payload))
	req.Header.Set("Content-Type", "application/json")
	router.ServeHTTP(rec, req)
	assert.Equal(t, 200, rec.Code)
	var resp map[string]any
	require.NoError(t, json.Unmarshal(rec.Body.Bytes(), &resp))
	msg := resp["choices"].([]any)[0].(map[string]any)["message"].(map[string]any)
	toolCalls := msg["tool_calls"].([]any)
	require.Len(t, toolCalls, 2)
	// Both calls must survive with their original IDs and city arguments.
	expected := []struct {
		id   string
		city string
	}{
		{"call_ptc_1", "北京"},
		{"call_ptc_2", "上海"},
	}
	for i, want := range expected {
		call := toolCalls[i].(map[string]any)
		assert.Equal(t, want.id, call["id"])
		assert.Contains(t, call["function"].(map[string]any)["arguments"], want.city)
	}
}
// TestE2E_OpenAI_NonStream_StopSequence verifies that a request carrying a
// custom stop sequence still yields finish_reason "stop" when the upstream
// halts generation at that sequence.
func TestE2E_OpenAI_NonStream_StopSequence(t *testing.T) {
	router, upstream := setupE2ETest(t)
	upstream.Config.Handler = http.HandlerFunc(func(w http.ResponseWriter, req *http.Request) {
		w.Header().Set("Content-Type", "application/json")
		// Upstream stops counting right before the stop sequence "5".
		json.NewEncoder(w).Encode(map[string]any{
			"id":      "chatcmpl-e2e-stop",
			"object":  "chat.completion",
			"created": 1700000060,
			"model":   "gpt-4o",
			"choices": []map[string]any{{
				"index":         0,
				"message":       map[string]any{"role": "assistant", "content": "1, 2, 3, 4, "},
				"finish_reason": "stop",
				"logprobs":      nil,
			}},
			"usage": map[string]any{"prompt_tokens": 10, "completion_tokens": 8, "total_tokens": 18},
		})
	})
	e2eCreateProviderAndModel(t, router, "openai-p", "openai", "gpt-4o", upstream.URL)
	payload, _ := json.Marshal(map[string]any{
		"model":    "gpt-4o",
		"messages": []map[string]any{{"role": "user", "content": "从1数到10"}},
		"stop":     []string{"5"},
	})
	rec := httptest.NewRecorder()
	req := httptest.NewRequest("POST", "/openai/v1/chat/completions", bytes.NewReader(payload))
	req.Header.Set("Content-Type", "application/json")
	router.ServeHTTP(rec, req)
	assert.Equal(t, 200, rec.Code)
	var resp map[string]any
	require.NoError(t, json.Unmarshal(rec.Body.Bytes(), &resp))
	firstChoice := resp["choices"].([]any)[0].(map[string]any)
	assert.Equal(t, "stop", firstChoice["finish_reason"])
}
// TestE2E_OpenAI_NonStream_ContentFilter verifies that a refusal response
// with finish_reason "content_filter" is passed through unchanged.
func TestE2E_OpenAI_NonStream_ContentFilter(t *testing.T) {
	router, upstream := setupE2ETest(t)
	upstream.Config.Handler = http.HandlerFunc(func(w http.ResponseWriter, req *http.Request) {
		w.Header().Set("Content-Type", "application/json")
		// Upstream refuses: no content, only a refusal message.
		json.NewEncoder(w).Encode(map[string]any{
			"id":      "chatcmpl-e2e-cf",
			"object":  "chat.completion",
			"created": 1700000070,
			"model":   "gpt-4o",
			"choices": []map[string]any{{
				"index": 0,
				"message": map[string]any{
					"role":    "assistant",
					"content": nil,
					"refusal": "内容被安全过滤器拦截。",
				},
				"finish_reason": "content_filter",
				"logprobs":      nil,
			}},
			"usage": map[string]any{"prompt_tokens": 8, "completion_tokens": 0, "total_tokens": 8},
		})
	})
	e2eCreateProviderAndModel(t, router, "openai-p", "openai", "gpt-4o", upstream.URL)
	payload, _ := json.Marshal(map[string]any{
		"model":    "gpt-4o",
		"messages": []map[string]any{{"role": "user", "content": "危险内容"}},
	})
	rec := httptest.NewRecorder()
	req := httptest.NewRequest("POST", "/openai/v1/chat/completions", bytes.NewReader(payload))
	req.Header.Set("Content-Type", "application/json")
	router.ServeHTTP(rec, req)
	assert.Equal(t, 200, rec.Code)
	var resp map[string]any
	require.NoError(t, json.Unmarshal(rec.Body.Bytes(), &resp))
	firstChoice := resp["choices"].([]any)[0].(map[string]any)
	assert.Equal(t, "content_filter", firstChoice["finish_reason"])
}
// TestE2E_Anthropic_NonStream_MultiToolUse verifies that a response carrying
// two tool_use content blocks is passed through with both block IDs intact.
func TestE2E_Anthropic_NonStream_MultiToolUse(t *testing.T) {
	router, upstream := setupE2ETest(t)
	upstream.Config.Handler = http.HandlerFunc(func(w http.ResponseWriter, req *http.Request) {
		w.Header().Set("Content-Type", "application/json")
		// Upstream answers with two parallel get_weather tool_use blocks.
		json.NewEncoder(w).Encode(map[string]any{
			"id": "msg_e2e_mt", "type": "message", "role": "assistant",
			"content": []map[string]any{
				{"type": "tool_use", "id": "toolu_mt_1", "name": "get_weather", "input": map[string]any{"city": "北京"}},
				{"type": "tool_use", "id": "toolu_mt_2", "name": "get_weather", "input": map[string]any{"city": "上海"}},
			},
			"model": "claude-opus-4-7", "stop_reason": "tool_use", "stop_sequence": nil,
			"usage": map[string]any{"input_tokens": 200, "output_tokens": 84},
		})
	})
	e2eCreateProviderAndModel(t, router, "anthropic-p", "anthropic", "claude-opus-4-7", upstream.URL)
	payload, _ := json.Marshal(map[string]any{
		"model":      "claude-opus-4-7",
		"max_tokens": 1024,
		"messages":   []map[string]any{{"role": "user", "content": "北京和上海的天气"}},
		"tools": []map[string]any{{
			"name": "get_weather", "description": "获取天气",
			"input_schema": map[string]any{
				"type":       "object",
				"properties": map[string]any{"city": map[string]any{"type": "string"}},
				"required":   []string{"city"},
			},
		}},
		"tool_choice": map[string]any{"type": "auto"},
	})
	rec := httptest.NewRecorder()
	req := httptest.NewRequest("POST", "/anthropic/v1/messages", bytes.NewReader(payload))
	req.Header.Set("Content-Type", "application/json")
	router.ServeHTTP(rec, req)
	assert.Equal(t, 200, rec.Code)
	var resp map[string]any
	require.NoError(t, json.Unmarshal(rec.Body.Bytes(), &resp))
	blocks := resp["content"].([]any)
	require.Len(t, blocks, 2)
	first := blocks[0].(map[string]any)
	second := blocks[1].(map[string]any)
	assert.Equal(t, "tool_use", first["type"])
	assert.Equal(t, "toolu_mt_1", first["id"])
	assert.Equal(t, "toolu_mt_2", second["id"])
}
// TestE2E_Anthropic_NonStream_ToolChoiceAny verifies that tool_choice
// {"type":"any"} is forwarded to the upstream unchanged and that the
// tool_use stop reason comes back through.
func TestE2E_Anthropic_NonStream_ToolChoiceAny(t *testing.T) {
	r, upstream := setupE2ETest(t)
	upstream.Config.Handler = http.HandlerFunc(func(w http.ResponseWriter, req *http.Request) {
		body, _ := io.ReadAll(req.Body)
		var reqBody map[string]any
		// The unmarshal error was previously ignored; surface it so a
		// malformed forwarded body fails loudly instead of producing a
		// confusing nil-map assertion. This runs on the server goroutine,
		// so assert (not require/FailNow) must be used.
		if !assert.NoError(t, json.Unmarshal(body, &reqBody)) {
			w.WriteHeader(http.StatusBadRequest)
			return
		}
		tc, _ := reqBody["tool_choice"].(map[string]any)
		assert.Equal(t, "any", tc["type"])
		w.Header().Set("Content-Type", "application/json")
		json.NewEncoder(w).Encode(map[string]any{
			"id": "msg_e2e_tca", "type": "message", "role": "assistant",
			"content": []map[string]any{
				{"type": "tool_use", "id": "toolu_tca_1", "name": "get_time", "input": map[string]any{"timezone": "Asia/Shanghai"}},
			},
			"model": "claude-opus-4-7", "stop_reason": "tool_use", "stop_sequence": nil,
			"usage": map[string]any{"input_tokens": 100, "output_tokens": 30},
		})
	})
	e2eCreateProviderAndModel(t, r, "anthropic-p", "anthropic", "claude-opus-4-7", upstream.URL)
	body, _ := json.Marshal(map[string]any{
		"model": "claude-opus-4-7", "max_tokens": 1024,
		"messages": []map[string]any{{"role": "user", "content": "现在几点了?"}},
		"tools": []map[string]any{{
			"name": "get_time", "description": "获取当前时间",
			"input_schema": map[string]any{
				"type":       "object",
				"properties": map[string]any{"timezone": map[string]any{"type": "string"}},
			},
		}},
		"tool_choice": map[string]any{"type": "any"},
	})
	w := httptest.NewRecorder()
	req := httptest.NewRequest("POST", "/anthropic/v1/messages", bytes.NewReader(body))
	req.Header.Set("Content-Type", "application/json")
	r.ServeHTTP(w, req)
	assert.Equal(t, 200, w.Code)
	var resp map[string]any
	require.NoError(t, json.Unmarshal(w.Body.Bytes(), &resp))
	assert.Equal(t, "tool_use", resp["stop_reason"])
}
// TestE2E_Anthropic_NonStream_ArraySystemPrompt verifies that a system
// prompt given as an array of text blocks is forwarded to the upstream as an
// array (not collapsed into a single string).
func TestE2E_Anthropic_NonStream_ArraySystemPrompt(t *testing.T) {
	r, upstream := setupE2ETest(t)
	upstream.Config.Handler = http.HandlerFunc(func(w http.ResponseWriter, req *http.Request) {
		body, _ := io.ReadAll(req.Body)
		var reqBody map[string]any
		// Server-goroutine handler: require (FailNow) is unsafe off the test
		// goroutine, so use assert and bail out explicitly on a bad body.
		// Also check the previously ignored unmarshal error.
		if !assert.NoError(t, json.Unmarshal(body, &reqBody)) {
			w.WriteHeader(http.StatusBadRequest)
			return
		}
		sys, ok := reqBody["system"].([]any)
		assert.True(t, ok, "system should be an array")
		assert.GreaterOrEqual(t, len(sys), 1)
		w.Header().Set("Content-Type", "application/json")
		json.NewEncoder(w).Encode(map[string]any{
			"id": "msg_e2e_asys", "type": "message", "role": "assistant",
			"content": []map[string]any{{"type": "text", "text": "已收到多条系统指令。"}},
			"model": "claude-opus-4-7", "stop_reason": "end_turn", "stop_sequence": nil,
			"usage": map[string]any{"input_tokens": 50, "output_tokens": 10},
		})
	})
	e2eCreateProviderAndModel(t, r, "anthropic-p", "anthropic", "claude-opus-4-7", upstream.URL)
	body, _ := json.Marshal(map[string]any{
		"model": "claude-opus-4-7", "max_tokens": 1024,
		"system": []map[string]any{
			{"type": "text", "text": "你是编程助手。"},
			{"type": "text", "text": "请用中文回答。"},
		},
		"messages": []map[string]any{{"role": "user", "content": "你好"}},
	})
	w := httptest.NewRecorder()
	req := httptest.NewRequest("POST", "/anthropic/v1/messages", bytes.NewReader(body))
	req.Header.Set("Content-Type", "application/json")
	r.ServeHTTP(w, req)
	assert.Equal(t, 200, w.Code)
}
// TestE2E_Anthropic_NonStream_ToolResultMessage verifies that a conversation
// containing a tool_use block and its matching tool_result block is
// forwarded upstream with the tool_result still carried by a "user" message.
func TestE2E_Anthropic_NonStream_ToolResultMessage(t *testing.T) {
	r, upstream := setupE2ETest(t)
	upstream.Config.Handler = http.HandlerFunc(func(w http.ResponseWriter, req *http.Request) {
		body, _ := io.ReadAll(req.Body)
		var reqBody map[string]any
		// Server-goroutine handler: use assert instead of require (FailNow
		// must stay on the test goroutine), check the previously ignored
		// unmarshal error, and guard the last-message access so a short
		// message list fails the assertion instead of panicking.
		if !assert.NoError(t, json.Unmarshal(body, &reqBody)) {
			w.WriteHeader(http.StatusBadRequest)
			return
		}
		msgs, _ := reqBody["messages"].([]any)
		if assert.GreaterOrEqual(t, len(msgs), 3) {
			lastMsg, _ := msgs[len(msgs)-1].(map[string]any)
			assert.Equal(t, "user", lastMsg["role"])
		}
		w.Header().Set("Content-Type", "application/json")
		json.NewEncoder(w).Encode(map[string]any{
			"id": "msg_e2e_tr", "type": "message", "role": "assistant",
			"content": []map[string]any{{"type": "text", "text": "北京当前晴天温度25°C。"}},
			"model": "claude-opus-4-7", "stop_reason": "end_turn", "stop_sequence": nil,
			"usage": map[string]any{"input_tokens": 150, "output_tokens": 20},
		})
	})
	e2eCreateProviderAndModel(t, r, "anthropic-p", "anthropic", "claude-opus-4-7", upstream.URL)
	body, _ := json.Marshal(map[string]any{
		"model": "claude-opus-4-7", "max_tokens": 1024,
		"messages": []map[string]any{
			{"role": "user", "content": "北京天气"},
			{"role": "assistant", "content": []map[string]any{
				{"type": "tool_use", "id": "toolu_prev", "name": "get_weather", "input": map[string]any{"city": "北京"}},
			}},
			{"role": "user", "content": []map[string]any{
				{"type": "tool_result", "tool_use_id": "toolu_prev", "content": "晴天25°C"},
			}},
		},
		"tools": []map[string]any{{
			"name": "get_weather", "description": "获取天气",
			"input_schema": map[string]any{
				"type":       "object",
				"properties": map[string]any{"city": map[string]any{"type": "string"}},
			},
		}},
	})
	w := httptest.NewRecorder()
	req := httptest.NewRequest("POST", "/anthropic/v1/messages", bytes.NewReader(body))
	req.Header.Set("Content-Type", "application/json")
	r.ServeHTTP(w, req)
	assert.Equal(t, 200, w.Code)
	var resp map[string]any
	require.NoError(t, json.Unmarshal(w.Body.Bytes(), &resp))
	assert.Contains(t, resp["content"].([]any)[0].(map[string]any)["text"], "25")
}
// TestE2E_Anthropic_Stream_ToolCalls verifies that a streaming tool-call
// response (tool_use block with incremental input_json_delta events) is
// forwarded to the client intact.
func TestE2E_Anthropic_Stream_ToolCalls(t *testing.T) {
	r, upstream := setupE2ETest(t)
	upstream.Config.Handler = http.HandlerFunc(func(w http.ResponseWriter, req *http.Request) {
		w.Header().Set("Content-Type", "text/event-stream")
		w.WriteHeader(http.StatusOK)
		flusher := w.(http.Flusher)
		// Scripted upstream SSE stream: a single tool_use block whose input
		// JSON arrives as one input_json_delta chunk.
		events := []string{
			"event: message_start\ndata: {\"type\":\"message_start\",\"message\":{\"id\":\"msg_stream_tc\",\"role\":\"assistant\",\"model\":\"claude-opus-4-7\",\"usage\":{\"input_tokens\":50,\"output_tokens\":0}}}\n\n",
			"event: content_block_start\ndata: {\"type\":\"content_block_start\",\"index\":0,\"content_block\":{\"type\":\"tool_use\",\"id\":\"toolu_stream_tc\",\"name\":\"get_weather\",\"input\":{}}}\n\n",
			"event: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"input_json_delta\",\"partial_json\":\"{\\\"city\\\":\\\"北京\\\"}\"}}\n\n",
			"event: content_block_stop\ndata: {\"type\":\"content_block_stop\",\"index\":0}\n\n",
			"event: message_delta\ndata: {\"type\":\"message_delta\",\"delta\":{\"stop_reason\":\"tool_use\"},\"usage\":{\"output_tokens\":30}}\n\n",
			"event: message_stop\ndata: {\"type\":\"message_stop\"}\n\n",
		}
		for _, e := range events {
			// Frames already carry their SSE terminators; flush after each
			// so the client receives them incrementally.
			w.Write([]byte(e))
			flusher.Flush()
			time.Sleep(10 * time.Millisecond)
		}
	})
	e2eCreateProviderAndModel(t, r, "anthropic-p", "anthropic", "claude-opus-4-7", upstream.URL)
	body, _ := json.Marshal(map[string]any{
		"model": "claude-opus-4-7", "max_tokens": 1024,
		"messages": []map[string]any{{"role": "user", "content": "北京天气"}},
		"tools": []map[string]any{{
			"name": "get_weather", "description": "获取天气",
			"input_schema": map[string]any{
				"type":       "object",
				"properties": map[string]any{"city": map[string]any{"type": "string"}},
			},
		}},
		"stream": true,
	})
	w := httptest.NewRecorder()
	req := httptest.NewRequest("POST", "/anthropic/v1/messages", bytes.NewReader(body))
	req.Header.Set("Content-Type", "application/json")
	r.ServeHTTP(w, req)
	assert.Equal(t, 200, w.Code)
	respBody := w.Body.String()
	// Tool name and incremental-input event type must be forwarded.
	assert.Contains(t, respBody, "tool_use")
	assert.Contains(t, respBody, "get_weather")
	assert.Contains(t, respBody, "input_json_delta")
}
// TestE2E_CrossProtocol_OpenAIToAnthropic_NonStream_ToolCalls verifies that
// OpenAI-style tool definitions ("function"/"parameters") are converted to
// Anthropic tools ("name"/"input_schema") on the way upstream, and that a
// tool_use response comes back as OpenAI tool_calls with finish_reason
// "tool_calls".
func TestE2E_CrossProtocol_OpenAIToAnthropic_NonStream_ToolCalls(t *testing.T) {
	r, upstream := setupE2ETest(t)
	upstream.Config.Handler = http.HandlerFunc(func(w http.ResponseWriter, req *http.Request) {
		// This handler runs on the HTTP server goroutine: use assert (Fail)
		// instead of require (FailNow), which is only valid on the test
		// goroutine, and guard indexed accesses so failures don't panic.
		assert.Equal(t, "/v1/messages", req.URL.Path)
		body, _ := io.ReadAll(req.Body)
		var reqBody map[string]any
		if !assert.NoError(t, json.Unmarshal(body, &reqBody)) {
			w.WriteHeader(http.StatusBadRequest)
			return
		}
		msgs, _ := reqBody["messages"].([]any)
		assert.GreaterOrEqual(t, len(msgs), 1)
		tools, _ := reqBody["tools"].([]any)
		if assert.GreaterOrEqual(t, len(tools), 1) {
			tool, _ := tools[0].(map[string]any)
			assert.Equal(t, "get_weather", tool["name"])
			// OpenAI "parameters" must be mapped to Anthropic "input_schema".
			assert.NotNil(t, tool["input_schema"])
		}
		w.Header().Set("Content-Type", "application/json")
		json.NewEncoder(w).Encode(map[string]any{
			"id": "msg_cross_tc", "type": "message", "role": "assistant",
			"content": []map[string]any{{
				"type": "tool_use", "id": "toolu_cross_tc", "name": "get_weather",
				"input": map[string]any{"city": "北京"},
			}},
			"model": "claude-model", "stop_reason": "tool_use", "stop_sequence": nil,
			"usage": map[string]any{"input_tokens": 100, "output_tokens": 30},
		})
	})
	e2eCreateProviderAndModel(t, r, "anthropic-p", "anthropic", "claude-model", upstream.URL)
	body, _ := json.Marshal(map[string]any{
		"model":    "claude-model",
		"messages": []map[string]any{{"role": "user", "content": "北京天气"}},
		"tools": []map[string]any{{
			"type": "function",
			"function": map[string]any{
				"name": "get_weather", "description": "获取天气",
				"parameters": map[string]any{
					"type":       "object",
					"properties": map[string]any{"city": map[string]any{"type": "string"}},
					"required":   []string{"city"},
				},
			},
		}},
	})
	w := httptest.NewRecorder()
	req := httptest.NewRequest("POST", "/openai/v1/chat/completions", bytes.NewReader(body))
	req.Header.Set("Content-Type", "application/json")
	r.ServeHTTP(w, req)
	assert.Equal(t, 200, w.Code)
	var resp map[string]any
	require.NoError(t, json.Unmarshal(w.Body.Bytes(), &resp))
	choice := resp["choices"].([]any)[0].(map[string]any)
	assert.Equal(t, "tool_calls", choice["finish_reason"])
	msg := choice["message"].(map[string]any)
	toolCalls := msg["tool_calls"].([]any)
	require.Len(t, toolCalls, 1)
	tc := toolCalls[0].(map[string]any)
	assert.Equal(t, "get_weather", tc["function"].(map[string]any)["name"])
}
// TestE2E_CrossProtocol_AnthropicToOpenAI_NonStream_Thinking verifies that
// an Anthropic /v1/messages request routed to an OpenAI provider yields a
// plain-text OpenAI answer converted back into an Anthropic message object.
func TestE2E_CrossProtocol_AnthropicToOpenAI_NonStream_Thinking(t *testing.T) {
	router, upstream := setupE2ETest(t)
	upstream.Config.Handler = http.HandlerFunc(func(w http.ResponseWriter, req *http.Request) {
		assert.Equal(t, "/v1/chat/completions", req.URL.Path)
		w.Header().Set("Content-Type", "application/json")
		json.NewEncoder(w).Encode(map[string]any{
			"id":      "chatcmpl-cross-think",
			"object":  "chat.completion",
			"model":   "gpt-4",
			"created": time.Now().Unix(),
			"choices": []map[string]any{{
				"index": 0,
				"message": map[string]any{
					"role":    "assistant",
					"content": "经过思考答案是42。",
				},
				"finish_reason": "stop",
			}},
			"usage": map[string]any{"prompt_tokens": 50, "completion_tokens": 100, "total_tokens": 150},
		})
	})
	e2eCreateProviderAndModel(t, router, "openai-p", "openai", "gpt-4", upstream.URL)
	payload, _ := json.Marshal(map[string]any{
		"model":      "gpt-4",
		"max_tokens": 4096,
		"messages":   []map[string]any{{"role": "user", "content": "宇宙的答案"}},
	})
	rec := httptest.NewRecorder()
	req := httptest.NewRequest("POST", "/anthropic/v1/messages", bytes.NewReader(payload))
	req.Header.Set("Content-Type", "application/json")
	router.ServeHTTP(rec, req)
	assert.Equal(t, 200, rec.Code)
	var resp map[string]any
	require.NoError(t, json.Unmarshal(rec.Body.Bytes(), &resp))
	assert.Equal(t, "message", resp["type"])
	blocks := resp["content"].([]any)
	assert.Contains(t, blocks[0].(map[string]any)["text"], "42")
}
// TestE2E_CrossProtocol_StopReasonMapping verifies that the Anthropic stop
// reason "max_tokens" is mapped to the OpenAI finish reason "length".
func TestE2E_CrossProtocol_StopReasonMapping(t *testing.T) {
	router, upstream := setupE2ETest(t)
	upstream.Config.Handler = http.HandlerFunc(func(w http.ResponseWriter, req *http.Request) {
		w.Header().Set("Content-Type", "application/json")
		// Upstream signals truncation via stop_reason "max_tokens".
		json.NewEncoder(w).Encode(map[string]any{
			"id": "msg_cross_stop", "type": "message", "role": "assistant",
			"content": []map[string]any{{"type": "text", "text": "被截断的内容..."}},
			"model": "claude-model", "stop_reason": "max_tokens", "stop_sequence": nil,
			"usage": map[string]any{"input_tokens": 10, "output_tokens": 20},
		})
	})
	e2eCreateProviderAndModel(t, router, "anthropic-p", "anthropic", "claude-model", upstream.URL)
	payload, _ := json.Marshal(map[string]any{
		"model":    "claude-model",
		"messages": []map[string]any{{"role": "user", "content": "长文"}},
	})
	rec := httptest.NewRecorder()
	req := httptest.NewRequest("POST", "/openai/v1/chat/completions", bytes.NewReader(payload))
	req.Header.Set("Content-Type", "application/json")
	router.ServeHTTP(rec, req)
	assert.Equal(t, 200, rec.Code)
	var resp map[string]any
	require.NoError(t, json.Unmarshal(rec.Body.Bytes(), &resp))
	firstChoice := resp["choices"].([]any)[0].(map[string]any)
	assert.Equal(t, "length", firstChoice["finish_reason"])
}
// TestE2E_OpenAI_NonStream_AssistantWithToolResult verifies that a full
// tool-call round trip (assistant tool_calls message followed by a "tool"
// role result message) is forwarded to the upstream intact.
func TestE2E_OpenAI_NonStream_AssistantWithToolResult(t *testing.T) {
	r, upstream := setupE2ETest(t)
	upstream.Config.Handler = http.HandlerFunc(func(w http.ResponseWriter, req *http.Request) {
		body, _ := io.ReadAll(req.Body)
		var reqBody map[string]any
		// Server-goroutine handler: use assert instead of require (FailNow
		// only works on the test goroutine), check the previously ignored
		// unmarshal error, and guard the msgs[2] access so a short message
		// list fails the assertion instead of panicking.
		if !assert.NoError(t, json.Unmarshal(body, &reqBody)) {
			w.WriteHeader(http.StatusBadRequest)
			return
		}
		msgs, _ := reqBody["messages"].([]any)
		if assert.GreaterOrEqual(t, len(msgs), 3) {
			toolMsg, _ := msgs[2].(map[string]any)
			assert.Equal(t, "tool", toolMsg["role"])
			assert.Equal(t, "call_e2e_001", toolMsg["tool_call_id"])
		}
		w.Header().Set("Content-Type", "application/json")
		json.NewEncoder(w).Encode(map[string]any{
			"id": "chatcmpl-e2e-tr", "object": "chat.completion", "created": 1700000080, "model": "gpt-4o",
			"choices": []map[string]any{{
				"index":         0,
				"message":       map[string]any{"role": "assistant", "content": "北京当前晴天温度25°C。"},
				"finish_reason": "stop",
				"logprobs":      nil,
			}},
			"usage": map[string]any{"prompt_tokens": 100, "completion_tokens": 20, "total_tokens": 120},
		})
	})
	e2eCreateProviderAndModel(t, r, "openai-p", "openai", "gpt-4o", upstream.URL)
	body, _ := json.Marshal(map[string]any{
		"model": "gpt-4o",
		"messages": []map[string]any{
			{"role": "user", "content": "北京天气"},
			{"role": "assistant", "content": nil, "tool_calls": []map[string]any{{
				"id": "call_e2e_001", "type": "function",
				"function": map[string]any{"name": "get_weather", "arguments": `{"city":"北京"}`},
			}}},
			{"role": "tool", "tool_call_id": "call_e2e_001", "content": "晴天25°C"},
		},
	})
	w := httptest.NewRecorder()
	req := httptest.NewRequest("POST", "/openai/v1/chat/completions", bytes.NewReader(body))
	req.Header.Set("Content-Type", "application/json")
	r.ServeHTTP(w, req)
	assert.Equal(t, 200, w.Code)
	var resp map[string]any
	require.NoError(t, json.Unmarshal(w.Body.Bytes(), &resp))
	msg := resp["choices"].([]any)[0].(map[string]any)["message"].(map[string]any)
	assert.Contains(t, msg["content"], "25")
}
// TestE2E_CrossProtocol_AnthropicToOpenAI_Stream_ToolCalls verifies that a
// streaming tool_use emitted by an Anthropic upstream is rewritten into
// OpenAI-style tool_calls chunks for a client on the OpenAI endpoint.
func TestE2E_CrossProtocol_AnthropicToOpenAI_Stream_ToolCalls(t *testing.T) {
	router, upstream := setupE2ETest(t)
	upstream.Config.Handler = http.HandlerFunc(func(w http.ResponseWriter, req *http.Request) {
		// The gateway must have rewritten the path to the Anthropic API.
		assert.Equal(t, "/v1/messages", req.URL.Path)
		w.Header().Set("Content-Type", "text/event-stream")
		w.WriteHeader(http.StatusOK)
		flusher := w.(http.Flusher)
		// Minimal tool_use stream: message_start, one tool_use block whose
		// input arrives as an input_json_delta, then a clean shutdown.
		frames := []string{
			"event: message_start\ndata: {\"type\":\"message_start\",\"message\":{\"id\":\"msg_cross_tc_stream\",\"role\":\"assistant\",\"model\":\"claude-model\",\"usage\":{\"input_tokens\":50,\"output_tokens\":0}}}\n\n",
			"event: content_block_start\ndata: {\"type\":\"content_block_start\",\"index\":0,\"content_block\":{\"type\":\"tool_use\",\"id\":\"toolu_cross_s1\",\"name\":\"get_weather\",\"input\":{}}}\n\n",
			"event: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"input_json_delta\",\"partial_json\":\"{\\\"city\\\":\\\"北京\\\"}\"}}\n\n",
			"event: content_block_stop\ndata: {\"type\":\"content_block_stop\",\"index\":0}\n\n",
			"event: message_delta\ndata: {\"type\":\"message_delta\",\"delta\":{\"stop_reason\":\"tool_use\"},\"usage\":{\"output_tokens\":30}}\n\n",
			"event: message_stop\ndata: {\"type\":\"message_stop\"}\n\n",
		}
		for _, frame := range frames {
			w.Write([]byte(frame))
			flusher.Flush()
			time.Sleep(10 * time.Millisecond)
		}
	})
	e2eCreateProviderAndModel(t, router, "anthropic-p", "anthropic", "claude-model", upstream.URL)

	payload, _ := json.Marshal(map[string]any{
		"model":    "claude-model",
		"messages": []map[string]any{{"role": "user", "content": "北京天气"}},
		"tools": []map[string]any{{
			"type": "function",
			"function": map[string]any{
				"name":        "get_weather",
				"description": "获取天气",
				"parameters": map[string]any{
					"type":       "object",
					"properties": map[string]any{"city": map[string]any{"type": "string"}},
				},
			},
		}},
		"stream": true,
	})
	rec := httptest.NewRecorder()
	httpReq := httptest.NewRequest("POST", "/openai/v1/chat/completions", bytes.NewReader(payload))
	httpReq.Header.Set("Content-Type", "application/json")
	router.ServeHTTP(rec, httpReq)

	assert.Equal(t, 200, rec.Code)
	// The OpenAI-side stream must carry the converted tool call.
	streamed := rec.Body.String()
	assert.Contains(t, streamed, "tool_calls")
	assert.Contains(t, streamed, "get_weather")
}
// TestE2E_CrossProtocol_OpenAIToAnthropic_Stream_ToolCalls verifies that
// streaming OpenAI tool_calls chunks from the upstream are rewritten into
// Anthropic tool_use events for a client on the Anthropic endpoint.
func TestE2E_CrossProtocol_OpenAIToAnthropic_Stream_ToolCalls(t *testing.T) {
	router, upstream := setupE2ETest(t)
	upstream.Config.Handler = http.HandlerFunc(func(w http.ResponseWriter, req *http.Request) {
		// The gateway must have rewritten the path to the OpenAI API.
		assert.Equal(t, "/v1/chat/completions", req.URL.Path)
		w.Header().Set("Content-Type", "text/event-stream")
		w.WriteHeader(http.StatusOK)
		flusher := w.(http.Flusher)
		// A tool call whose arguments arrive split across several chunks.
		frames := []string{
			`data: {"id":"chatcmpl-cross-tc-s","object":"chat.completion.chunk","model":"gpt-4","choices":[{"index":0,"delta":{"role":"assistant","content":null,"tool_calls":[{"index":0,"id":"call_cross_tc","type":"function","function":{"name":"get_weather","arguments":""}}]},"finish_reason":null}]}`,
			`data: {"id":"chatcmpl-cross-tc-s","object":"chat.completion.chunk","model":"gpt-4","choices":[{"index":0,"delta":{"tool_calls":[{"index":0,"function":{"arguments":"{\"city\":"}}]},"finish_reason":null}]}`,
			`data: {"id":"chatcmpl-cross-tc-s","object":"chat.completion.chunk","model":"gpt-4","choices":[{"index":0,"delta":{"tool_calls":[{"index":0,"function":{"arguments":"\"北京\"}"}}]},"finish_reason":null}]}`,
			`data: {"id":"chatcmpl-cross-tc-s","object":"chat.completion.chunk","model":"gpt-4","choices":[{"index":0,"delta":{},"finish_reason":"tool_calls"}]}`,
			`data: [DONE]`,
		}
		for _, frame := range frames {
			fmt.Fprintf(w, "%s\n\n", frame)
			flusher.Flush()
			time.Sleep(10 * time.Millisecond)
		}
	})
	e2eCreateProviderAndModel(t, router, "openai-p", "openai", "gpt-4", upstream.URL)

	payload, _ := json.Marshal(map[string]any{
		"model":    "gpt-4",
		"max_tokens": 1024,
		"messages": []map[string]any{{"role": "user", "content": "北京天气"}},
		"tools": []map[string]any{{
			"name":        "get_weather",
			"description": "获取天气",
			"input_schema": map[string]any{
				"type":       "object",
				"properties": map[string]any{"city": map[string]any{"type": "string"}},
			},
		}},
		"stream": true,
	})
	rec := httptest.NewRecorder()
	httpReq := httptest.NewRequest("POST", "/anthropic/v1/messages", bytes.NewReader(payload))
	httpReq.Header.Set("Content-Type", "application/json")
	router.ServeHTTP(rec, httpReq)

	assert.Equal(t, 200, rec.Code)
	// The Anthropic-side stream must carry the converted tool invocation.
	streamed := rec.Body.String()
	assert.Contains(t, streamed, "tool_use")
	assert.Contains(t, streamed, "get_weather")
}
// TestE2E_OpenAI_Upstream5xx_ErrorPassthrough verifies that a 5xx error body
// from an OpenAI upstream is relayed to the client with the status code and
// error payload preserved.
func TestE2E_OpenAI_Upstream5xx_ErrorPassthrough(t *testing.T) {
	router, upstream := setupE2ETest(t)
	upstream.Config.Handler = http.HandlerFunc(func(w http.ResponseWriter, req *http.Request) {
		w.Header().Set("Content-Type", "application/json")
		w.WriteHeader(http.StatusInternalServerError)
		errPayload := map[string]any{
			"error": map[string]any{
				"message": "Internal server error",
				"type":    "server_error",
				"code":    "internal_error",
			},
		}
		json.NewEncoder(w).Encode(errPayload)
	})
	e2eCreateProviderAndModel(t, router, "openai-p", "openai", "gpt-4o", upstream.URL)

	payload, _ := json.Marshal(map[string]any{
		"model":    "gpt-4o",
		"messages": []map[string]any{{"role": "user", "content": "test"}},
	})
	rec := httptest.NewRecorder()
	httpReq := httptest.NewRequest("POST", "/openai/v1/chat/completions", bytes.NewReader(payload))
	httpReq.Header.Set("Content-Type", "application/json")
	router.ServeHTTP(rec, httpReq)

	assert.Equal(t, http.StatusInternalServerError, rec.Code)
	var resp map[string]any
	require.NoError(t, json.Unmarshal(rec.Body.Bytes(), &resp))
	errObj, ok := resp["error"].(map[string]any)
	require.True(t, ok, "response should contain error object")
	assert.Contains(t, errObj["message"], "Internal server error")
}
// TestE2E_Anthropic_Upstream5xx_ErrorPassthrough verifies that a 5xx error
// from an Anthropic upstream is relayed to the client in Anthropic error
// shape (type=error) with the original status code and message.
func TestE2E_Anthropic_Upstream5xx_ErrorPassthrough(t *testing.T) {
	router, upstream := setupE2ETest(t)
	upstream.Config.Handler = http.HandlerFunc(func(w http.ResponseWriter, req *http.Request) {
		w.Header().Set("Content-Type", "application/json")
		w.WriteHeader(http.StatusInternalServerError)
		errPayload := map[string]any{
			"type": "error",
			"error": map[string]any{
				"type":    "api_error",
				"message": "Internal server error",
			},
		}
		json.NewEncoder(w).Encode(errPayload)
	})
	e2eCreateProviderAndModel(t, router, "anthropic-p", "anthropic", "claude-opus-4-7", upstream.URL)

	payload, _ := json.Marshal(map[string]any{
		"model":    "claude-opus-4-7",
		"max_tokens": 1024,
		"messages": []map[string]any{{"role": "user", "content": "test"}},
	})
	rec := httptest.NewRecorder()
	httpReq := httptest.NewRequest("POST", "/anthropic/v1/messages", bytes.NewReader(payload))
	httpReq.Header.Set("Content-Type", "application/json")
	router.ServeHTTP(rec, httpReq)

	assert.Equal(t, http.StatusInternalServerError, rec.Code)
	var resp map[string]any
	require.NoError(t, json.Unmarshal(rec.Body.Bytes(), &resp))
	assert.Equal(t, "error", resp["type"])
	errObj, ok := resp["error"].(map[string]any)
	require.True(t, ok, "response should contain error object")
	assert.Contains(t, errObj["message"], "Internal server error")
}
// TestE2E_Anthropic_Stream_TruncatedSSE verifies graceful handling of an
// upstream SSE stream that is cut off early (no message_delta/message_stop):
// whatever events arrived before the cut must still reach the client.
func TestE2E_Anthropic_Stream_TruncatedSSE(t *testing.T) {
	router, upstream := setupE2ETest(t)
	upstream.Config.Handler = http.HandlerFunc(func(w http.ResponseWriter, req *http.Request) {
		w.Header().Set("Content-Type", "text/event-stream")
		w.WriteHeader(http.StatusOK)
		flusher := w.(http.Flusher)
		// Deliberately stop after the first text delta: the stream ends
		// without message_delta/message_stop, simulating a truncated upstream.
		frames := []string{
			"event: message_start\ndata: {\"type\":\"message_start\",\"message\":{\"id\":\"msg_trunc\",\"role\":\"assistant\",\"model\":\"claude-opus-4-7\",\"usage\":{\"input_tokens\":10,\"output_tokens\":0}}}\n\n",
			"event: content_block_start\ndata: {\"type\":\"content_block_start\",\"index\":0,\"content_block\":{\"type\":\"text\",\"text\":\"\"}}\n\n",
			"event: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"正常\"}}\n\n",
		}
		for _, frame := range frames {
			w.Write([]byte(frame))
			flusher.Flush()
			time.Sleep(10 * time.Millisecond)
		}
	})
	e2eCreateProviderAndModel(t, router, "anthropic-p", "anthropic", "claude-opus-4-7", upstream.URL)

	payload, _ := json.Marshal(map[string]any{
		"model":    "claude-opus-4-7",
		"max_tokens": 1024,
		"messages": []map[string]any{{"role": "user", "content": "test"}},
		"stream":   true,
	})
	rec := httptest.NewRecorder()
	httpReq := httptest.NewRequest("POST", "/anthropic/v1/messages", bytes.NewReader(payload))
	httpReq.Header.Set("Content-Type", "application/json")
	router.ServeHTTP(rec, httpReq)

	assert.Equal(t, 200, rec.Code)
	// The events received before truncation must have been forwarded.
	streamed := rec.Body.String()
	assert.Contains(t, streamed, "message_start")
	assert.Contains(t, streamed, "正常")
}
// NOTE(review): removed the stale blank-identifier keepers
// (`var _ = fmt.Sprintf`, `var _ = time.Now`). Both fmt and time are
// referenced directly by the streaming tests above (fmt.Fprintf,
// time.Sleep), so the imports are already in use.