@@ -12,19 +12,20 @@ import (
"testing"
"time"
"github.com/gin-gonic/gin"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
"go.uber.org/zap"
"nex/backend/internal/conversion"
"nex/backend/internal/conversion/anthropic"
openaiConv "nex/backend/internal/conversion/openai"
"nex/backend/internal/handler"
"nex/backend/internal/handler/middleware"
"nex/backend/internal/provider"
"nex/backend/internal/repository"
"nex/backend/internal/service"
"github.com/gin-gonic/gin"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
"go.uber.org/zap"
openaiConv "nex/backend/internal/conversion/openai"
)
func setupE2ETest ( t * testing . T ) ( * gin . Engine , * httptest . Server ) {
@@ -33,7 +34,8 @@ func setupE2ETest(t *testing.T) (*gin.Engine, *httptest.Server) {
upstream := httptest . NewServer ( http . HandlerFunc ( func ( w http . ResponseWriter , r * http . Request ) {
w . WriteHeader ( http . StatusOK )
w . Write ( [ ] byte ( ` { "error":"not mocked"} ` ) )
_ , err := w. Write ( [ ] byte ( ` { "error":"not mocked"} ` ) )
require . NoError ( t , err )
} ) )
db := setupTestDB ( t )
@@ -115,11 +117,12 @@ func parseSSEEvents(body string) []map[string]string {
var currentEvent , currentData string
for scanner . Scan ( ) {
line := scanner . Text ( )
if strings . HasPrefix ( line , "event: " ) {
switch {
case strings . HasPrefix ( line , "event: " ) :
currentEvent = strings . TrimPrefix ( line , "event: " )
} else if strings . HasPrefix ( line , "data: " ) {
case strings . HasPrefix ( line , "data: " ) :
currentData = strings . TrimPrefix ( line , "data: " )
} else if line == "" && ( currentEvent != "" || currentData != "" ) {
case line == "" && ( currentEvent != "" || currentData != "" ) :
events = append ( events , map [ string ] string {
"event" : currentEvent ,
"data" : currentData ,
@@ -157,21 +160,21 @@ func TestE2E_OpenAI_NonStream_BasicText(t *testing.T) {
upstream . Config . Handler = http . HandlerFunc ( func ( w http . ResponseWriter , req * http . Request ) {
assert . Equal ( t , "/chat/completions" , req . URL . Path )
w . Header ( ) . Set ( "Content-Type" , "application/json" )
json . NewEncoder ( w ) . Encode ( map [ string ] any {
require . NoError ( t , json. NewEncoder ( w ) . Encode ( map [ string ] any {
"id" : "chatcmpl-e2e-001" ,
"object" : "chat.completion" ,
"created" : 1700000000 ,
"model" : "gpt-4o" ,
"choices" : [ ] map [ string ] any { {
"index" : 0 ,
"message" : map [ string ] any { "role" : "assistant" , "content" : "你好! 我是AI助手。" } ,
"index" : 0 ,
"message" : map [ string ] any { "role" : "assistant" , "content" : "你好! 我是AI助手。" } ,
"finish_reason" : "stop" ,
"logprobs" : nil ,
"logprobs" : nil ,
} } ,
"usage" : map [ string ] any {
"prompt_tokens" : 15 , "completion_tokens" : 10 , "total_tokens" : 25 ,
} ,
} )
} ) )
} )
e2eCreateProviderAndModel ( t , r , "openai_p" , "openai" , "gpt-4o" , upstream . URL )
@@ -210,21 +213,23 @@ func TestE2E_OpenAI_NonStream_BasicText(t *testing.T) {
func TestE2E_OpenAI_NonStream_MultiTurn ( t * testing . T ) {
r , upstream := setupE2ETest ( t )
upstream . Config . Handler = http . HandlerFunc ( func ( w http . ResponseWriter , req * http . Request ) {
body , _ := io . ReadAll ( req . Body )
body , err := io . ReadAll ( req . Body )
require . NoError ( t , err )
var reqBody map [ string ] any
json . Unmarshal ( body , & reqBody )
msgs := reqBody [ "messages" ] . ( [ ] any )
require . NoError ( t , json. Unmarshal ( body , & reqBody ) )
msgs , ok := reqBody [ "messages" ] . ( [ ] any )
require . True ( t , ok )
assert . GreaterOrEqual ( t , len ( msgs ) , 3 )
w . Header ( ) . Set ( "Content-Type" , "application/json" )
json . NewEncoder ( w ) . Encode ( map [ string ] any {
require . NoError ( t , json. NewEncoder ( w ) . Encode ( map [ string ] any {
"id" : "chatcmpl-e2e-002" , "object" : "chat.completion" , "created" : 1700000001 , "model" : "gpt-4o" ,
"choices" : [ ] map [ string ] any { {
"index" : 0 , "message" : map [ string ] any { "role" : "assistant" , "content" : "Go语言的interface是隐式实现的。" } ,
"finish_reason" : "stop" , "logprobs" : nil ,
} } ,
"usage" : map [ string ] any { "prompt_tokens" : 100 , "completion_tokens" : 20 , "total_tokens" : 120 } ,
} )
} ) )
} )
e2eCreateProviderAndModel ( t , r , "openai_p" , "openai" , "gpt-4o" , upstream . URL )
@@ -252,7 +257,7 @@ func TestE2E_OpenAI_NonStream_ToolCalls(t *testing.T) {
r , upstream := setupE2ETest ( t )
upstream . Config . Handler = http . HandlerFunc ( func ( w http . ResponseWriter , req * http . Request ) {
w . Header ( ) . Set ( "Content-Type" , "application/json" )
json . NewEncoder ( w ) . Encode ( map [ string ] any {
require . NoError ( t , json. NewEncoder ( w ) . Encode ( map [ string ] any {
"id" : "chatcmpl-e2e-004" , "object" : "chat.completion" , "created" : 1700000003 , "model" : "gpt-4o" ,
"choices" : [ ] map [ string ] any { {
"index" : 0 ,
@@ -272,7 +277,7 @@ func TestE2E_OpenAI_NonStream_ToolCalls(t *testing.T) {
"logprobs" : nil ,
} } ,
"usage" : map [ string ] any { "prompt_tokens" : 80 , "completion_tokens" : 18 , "total_tokens" : 98 } ,
} )
} ) )
} )
e2eCreateProviderAndModel ( t , r , "openai_p" , "openai" , "gpt-4o" , upstream . URL )
@@ -286,9 +291,9 @@ func TestE2E_OpenAI_NonStream_ToolCalls(t *testing.T) {
"function" : map [ string ] any {
"name" : "get_weather" , "description" : "获取天气" ,
"parameters" : map [ string ] any {
"type" : "object" ,
"type" : "object" ,
"properties" : map [ string ] any { "city" : map [ string ] any { "type" : "string" } } ,
"required" : [ ] string { "city" } ,
"required" : [ ] string { "city" } ,
} ,
} ,
} } ,
@@ -319,22 +324,22 @@ func TestE2E_OpenAI_NonStream_MaxTokens_Length(t *testing.T) {
r , upstream := setupE2ETest ( t )
upstream . Config . Handler = http . HandlerFunc ( func ( w http . ResponseWriter , req * http . Request ) {
w . Header ( ) . Set ( "Content-Type" , "application/json" )
json . NewEncoder ( w ) . Encode ( map [ string ] any {
require . NoError ( t , json. NewEncoder ( w ) . Encode ( map [ string ] any {
"id" : "chatcmpl-e2e-014" , "object" : "chat.completion" , "created" : 1700000014 , "model" : "gpt-4o" ,
"choices" : [ ] map [ string ] any { {
"index" : 0 ,
"message" : map [ string ] any { "role" : "assistant" , "content" : "人工智能起源于1950年代..." } ,
"index" : 0 ,
"message" : map [ string ] any { "role" : "assistant" , "content" : "人工智能起源于1950年代..." } ,
"finish_reason" : "length" ,
"logprobs" : nil ,
} } ,
"usage" : map [ string ] any { "prompt_tokens" : 20 , "completion_tokens" : 30 , "total_tokens" : 50 } ,
} )
} ) )
} )
e2eCreateProviderAndModel ( t , r , "openai_p" , "openai" , "gpt-4o" , upstream . URL )
body , _ := json . Marshal ( map [ string ] any {
"model" : "openai_p/gpt-4o" ,
"messages" : [ ] map [ string ] any { { "role" : "user" , "content" : "介绍AI历史" } } ,
"model" : "openai_p/gpt-4o" ,
"messages" : [ ] map [ string ] any { { "role" : "user" , "content" : "介绍AI历史" } } ,
"max_tokens" : 30 ,
} )
w := httptest . NewRecorder ( )
@@ -353,11 +358,11 @@ func TestE2E_OpenAI_NonStream_UsageWithReasoning(t *testing.T) {
r , upstream := setupE2ETest ( t )
upstream . Config . Handler = http . HandlerFunc ( func ( w http . ResponseWriter , req * http . Request ) {
w . Header ( ) . Set ( "Content-Type" , "application/json" )
json . NewEncoder ( w ) . Encode ( map [ string ] any {
require . NoError ( t , json. NewEncoder ( w ) . Encode ( map [ string ] any {
"id" : "chatcmpl-e2e-022" , "object" : "chat.completion" , "created" : 1700000022 , "model" : "o3" ,
"choices" : [ ] map [ string ] any { {
"index" : 0 ,
"message" : map [ string ] any { "role" : "assistant" , "content" : "答案是61。" } ,
"index" : 0 ,
"message" : map [ string ] any { "role" : "assistant" , "content" : "答案是61。" } ,
"finish_reason" : "stop" ,
"logprobs" : nil ,
} } ,
@@ -365,12 +370,12 @@ func TestE2E_OpenAI_NonStream_UsageWithReasoning(t *testing.T) {
"prompt_tokens" : 35 , "completion_tokens" : 48 , "total_tokens" : 83 ,
"completion_tokens_details" : map [ string ] any { "reasoning_tokens" : 20 } ,
} ,
} )
} ) )
} )
e2eCreateProviderAndModel ( t , r , "openai_p" , "openai" , "o3" , upstream . URL )
body , _ := json . Marshal ( map [ string ] any {
"model" : "openai_p/o3" ,
"model" : "openai_p/o3" ,
"messages" : [ ] map [ string ] any { { "role" : "user" , "content" : "15+23*2=?" } } ,
} )
w := httptest . NewRecorder ( )
@@ -393,12 +398,12 @@ func TestE2E_OpenAI_NonStream_Refusal(t *testing.T) {
r , upstream := setupE2ETest ( t )
upstream . Config . Handler = http . HandlerFunc ( func ( w http . ResponseWriter , req * http . Request ) {
w . Header ( ) . Set ( "Content-Type" , "application/json" )
json . NewEncoder ( w ) . Encode ( map [ string ] any {
require . NoError ( t , json. NewEncoder ( w ) . Encode ( map [ string ] any {
"id" : "chatcmpl-e2e-007" , "object" : "chat.completion" , "created" : 1700000007 , "model" : "gpt-4o" ,
"choices" : [ ] map [ string ] any { {
"index" : 0 ,
"message" : map [ string ] any {
"role" : "assistant" ,
"role" : "assistant" ,
"content" : nil ,
"refusal" : "抱歉,我无法提供涉及危险活动的信息。" ,
} ,
@@ -406,12 +411,12 @@ func TestE2E_OpenAI_NonStream_Refusal(t *testing.T) {
"logprobs" : nil ,
} } ,
"usage" : map [ string ] any { "prompt_tokens" : 12 , "completion_tokens" : 35 , "total_tokens" : 47 } ,
} )
} ) )
} )
e2eCreateProviderAndModel ( t , r , "openai_p" , "openai" , "gpt-4o" , upstream . URL )
body , _ := json . Marshal ( map [ string ] any {
"model" : "openai_p/gpt-4o" ,
"model" : "openai_p/gpt-4o" ,
"messages" : [ ] map [ string ] any { { "role" : "user" , "content" : "做坏事" } } ,
} )
w := httptest . NewRecorder ( )
@@ -453,9 +458,9 @@ func TestE2E_OpenAI_Stream_Text(t *testing.T) {
e2eCreateProviderAndModel ( t , r , "openai_p" , "openai" , "gpt-4o" , upstream . URL )
body , _ := json . Marshal ( map [ string ] any {
"model" : "openai_p/gpt-4o" ,
"model" : "openai_p/gpt-4o" ,
"messages" : [ ] map [ string ] any { { "role" : "user" , "content" : "你好" } } ,
"stream" : true ,
"stream" : true ,
} )
w := httptest . NewRecorder ( )
req := httptest . NewRequest ( "POST" , "/openai/chat/completions" , bytes . NewReader ( body ) )
@@ -497,14 +502,14 @@ func TestE2E_OpenAI_Stream_ToolCalls(t *testing.T) {
e2eCreateProviderAndModel ( t , r , "openai_p" , "openai" , "gpt-4o" , upstream . URL )
body , _ := json . Marshal ( map [ string ] any {
"model" : "openai_p/gpt-4o" ,
"model" : "openai_p/gpt-4o" ,
"messages" : [ ] map [ string ] any { { "role" : "user" , "content" : "北京天气" } } ,
"tools" : [ ] map [ string ] any { {
"type" : "function" ,
"function" : map [ string ] any {
"name" : "get_weather" , "description" : "获取天气" ,
"parameters" : map [ string ] any {
"type" : "object" ,
"type" : "object" ,
"properties" : map [ string ] any { "city" : map [ string ] any { "type" : "string" } } ,
} ,
} ,
@@ -546,9 +551,9 @@ func TestE2E_OpenAI_Stream_WithUsage(t *testing.T) {
e2eCreateProviderAndModel ( t , r , "openai_p" , "openai" , "gpt-4o" , upstream . URL )
body , _ := json . Marshal ( map [ string ] any {
"model" : "openai_p/gpt-4o" ,
"model" : "openai_p/gpt-4o" ,
"messages" : [ ] map [ string ] any { { "role" : "user" , "content" : "hi" } } ,
"stream" : true ,
"stream" : true ,
} )
w := httptest . NewRecorder ( )
req := httptest . NewRequest ( "POST" , "/openai/chat/completions" , bytes . NewReader ( body ) )
@@ -569,14 +574,14 @@ func TestE2E_Anthropic_NonStream_BasicText(t *testing.T) {
r , upstream := setupE2ETest ( t )
upstream . Config . Handler = http . HandlerFunc ( func ( w http . ResponseWriter , req * http . Request ) {
w . Header ( ) . Set ( "Content-Type" , "application/json" )
json . NewEncoder ( w ) . Encode ( map [ string ] any {
require . NoError ( t , json. NewEncoder ( w ) . Encode ( map [ string ] any {
"id" : "msg_e2e_001" , "type" : "message" , "role" : "assistant" ,
"content" : [ ] map [ string ] any {
{ "type" : "text" , "text" : "你好! 我是Claude, 由Anthropic开发的AI助手。" } ,
} ,
"model" : "claude-opus-4-7" , "stop_reason" : "end_turn" , "stop_sequence" : nil ,
"usage" : map [ string ] any { "input_tokens" : 15 , "output_tokens" : 25 } ,
} )
} ) )
} )
e2eCreateProviderAndModel ( t , r , "anthropic_p" , "anthropic" , "claude-opus-4-7" , upstream . URL )
@@ -611,24 +616,25 @@ func TestE2E_Anthropic_NonStream_BasicText(t *testing.T) {
func TestE2E_Anthropic_NonStream_WithSystem ( t * testing . T ) {
r , upstream := setupE2ETest ( t )
upstream . Config . Handler = http . HandlerFunc ( func ( w http . ResponseWriter , req * http . Request ) {
body , _ := io . ReadAll ( req . Body )
body , err := io . ReadAll ( req . Body )
require . NoError ( t , err )
var reqBody map [ string ] any
json . Unmarshal ( body , & reqBody )
require . NoError ( t , json. Unmarshal ( body , & reqBody ) )
assert . NotNil ( t , reqBody [ "system" ] )
w . Header ( ) . Set ( "Content-Type" , "application/json" )
json . NewEncoder ( w ) . Encode ( map [ string ] any {
require . NoError ( t , json. NewEncoder ( w ) . Encode ( map [ string ] any {
"id" : "msg_e2e_003" , "type" : "message" , "role" : "assistant" ,
"content" : [ ] map [ string ] any { { "type" : "text" , "text" : "递归是函数调用自身。" } } ,
"model" : "claude-opus-4-7" , "stop_reason" : "end_turn" , "stop_sequence" : nil ,
"model" : "claude-opus-4-7" , "stop_reason" : "end_turn" , "stop_sequence" : nil ,
"usage" : map [ string ] any { "input_tokens" : 30 , "output_tokens" : 15 } ,
} )
} ) )
} )
e2eCreateProviderAndModel ( t , r , "anthropic_p" , "anthropic" , "claude-opus-4-7" , upstream . URL )
body , _ := json . Marshal ( map [ string ] any {
"model" : "anthropic_p/claude-opus-4-7" , "max_tokens" : 1024 ,
"system" : "你是编程助手" ,
"system" : "你是编程助手" ,
"messages" : [ ] map [ string ] any { { "role" : "user" , "content" : "什么是递归?" } } ,
} )
w := httptest . NewRecorder ( )
@@ -643,7 +649,7 @@ func TestE2E_Anthropic_NonStream_ToolUse(t *testing.T) {
r , upstream := setupE2ETest ( t )
upstream . Config . Handler = http . HandlerFunc ( func ( w http . ResponseWriter , req * http . Request ) {
w . Header ( ) . Set ( "Content-Type" , "application/json" )
json . NewEncoder ( w ) . Encode ( map [ string ] any {
require . NoError ( t , json. NewEncoder ( w ) . Encode ( map [ string ] any {
"id" : "msg_e2e_009" , "type" : "message" , "role" : "assistant" ,
"content" : [ ] map [ string ] any { {
"type" : "tool_use" , "id" : "toolu_e2e_009" , "name" : "get_weather" ,
@@ -651,7 +657,7 @@ func TestE2E_Anthropic_NonStream_ToolUse(t *testing.T) {
} } ,
"model" : "claude-opus-4-7" , "stop_reason" : "tool_use" , "stop_sequence" : nil ,
"usage" : map [ string ] any { "input_tokens" : 180 , "output_tokens" : 42 } ,
} )
} ) )
} )
e2eCreateProviderAndModel ( t , r , "anthropic_p" , "anthropic" , "claude-opus-4-7" , upstream . URL )
@@ -661,9 +667,9 @@ func TestE2E_Anthropic_NonStream_ToolUse(t *testing.T) {
"tools" : [ ] map [ string ] any { {
"name" : "get_weather" , "description" : "获取天气" ,
"input_schema" : map [ string ] any {
"type" : "object" ,
"type" : "object" ,
"properties" : map [ string ] any { "city" : map [ string ] any { "type" : "string" } } ,
"required" : [ ] string { "city" } ,
"required" : [ ] string { "city" } ,
} ,
} } ,
"tool_choice" : map [ string ] any { "type" : "auto" } ,
@@ -689,7 +695,7 @@ func TestE2E_Anthropic_NonStream_Thinking(t *testing.T) {
r , upstream := setupE2ETest ( t )
upstream . Config . Handler = http . HandlerFunc ( func ( w http . ResponseWriter , req * http . Request ) {
w . Header ( ) . Set ( "Content-Type" , "application/json" )
json . NewEncoder ( w ) . Encode ( map [ string ] any {
require . NoError ( t , json. NewEncoder ( w ) . Encode ( map [ string ] any {
"id" : "msg_e2e_018" , "type" : "message" , "role" : "assistant" ,
"content" : [ ] map [ string ] any {
{ "type" : "thinking" , "thinking" : "这是一个逻辑推理问题..." } ,
@@ -697,7 +703,7 @@ func TestE2E_Anthropic_NonStream_Thinking(t *testing.T) {
} ,
"model" : "claude-opus-4-7" , "stop_reason" : "end_turn" , "stop_sequence" : nil ,
"usage" : map [ string ] any { "input_tokens" : 95 , "output_tokens" : 280 } ,
} )
} ) )
} )
e2eCreateProviderAndModel ( t , r , "anthropic_p" , "anthropic" , "claude-opus-4-7" , upstream . URL )
@@ -724,12 +730,12 @@ func TestE2E_Anthropic_NonStream_MaxTokens(t *testing.T) {
r , upstream := setupE2ETest ( t )
upstream . Config . Handler = http . HandlerFunc ( func ( w http . ResponseWriter , req * http . Request ) {
w . Header ( ) . Set ( "Content-Type" , "application/json" )
json . NewEncoder ( w ) . Encode ( map [ string ] any {
require . NoError ( t , json. NewEncoder ( w ) . Encode ( map [ string ] any {
"id" : "msg_e2e_016" , "type" : "message" , "role" : "assistant" ,
"content" : [ ] map [ string ] any { { "type" : "text" , "text" : "人工智能起源于..." } } ,
"model" : "claude-opus-4-7" , "stop_reason" : "max_tokens" , "stop_sequence" : nil ,
"model" : "claude-opus-4-7" , "stop_reason" : "max_tokens" , "stop_sequence" : nil ,
"usage" : map [ string ] any { "input_tokens" : 22 , "output_tokens" : 20 } ,
} )
} ) )
} )
e2eCreateProviderAndModel ( t , r , "anthropic_p" , "anthropic" , "claude-opus-4-7" , upstream . URL )
@@ -752,18 +758,18 @@ func TestE2E_Anthropic_NonStream_StopSequence(t *testing.T) {
r , upstream := setupE2ETest ( t )
upstream . Config . Handler = http . HandlerFunc ( func ( w http . ResponseWriter , req * http . Request ) {
w . Header ( ) . Set ( "Content-Type" , "application/json" )
json . NewEncoder ( w ) . Encode ( map [ string ] any {
require . NoError ( t , json. NewEncoder ( w ) . Encode ( map [ string ] any {
"id" : "msg_e2e_017" , "type" : "message" , "role" : "assistant" ,
"content" : [ ] map [ string ] any { { "type" : "text" , "text" : "1\n2\n3\n4\n" } } ,
"model" : "claude-opus-4-7" , "stop_reason" : "stop_sequence" , "stop_sequence" : "5" ,
"model" : "claude-opus-4-7" , "stop_reason" : "stop_sequence" , "stop_sequence" : "5" ,
"usage" : map [ string ] any { "input_tokens" : 22 , "output_tokens" : 10 } ,
} )
} ) )
} )
e2eCreateProviderAndModel ( t , r , "anthropic_p" , "anthropic" , "claude-opus-4-7" , upstream . URL )
body , _ := json . Marshal ( map [ string ] any {
"model" : "anthropic_p/claude-opus-4-7" , "max_tokens" : 1024 ,
"messages" : [ ] map [ string ] any { { "role" : "user" , "content" : "从1数到10" } } ,
"messages" : [ ] map [ string ] any { { "role" : "user" , "content" : "从1数到10" } } ,
"stop_sequences" : [ ] string { "5" } ,
} )
w := httptest . NewRecorder ( )
@@ -781,19 +787,20 @@ func TestE2E_Anthropic_NonStream_StopSequence(t *testing.T) {
func TestE2E_Anthropic_NonStream_MetadataUserID ( t * testing . T ) {
r , upstream := setupE2ETest ( t )
upstream . Config . Handler = http . HandlerFunc ( func ( w http . ResponseWriter , req * http . Request ) {
body , _ := io . ReadAll ( req . Body )
body , err := io . ReadAll ( req . Body )
require . NoError ( t , err )
var reqBody map [ string ] any
json . Unmarshal ( body , & reqBody )
require . NoError ( t , json. Unmarshal ( body , & reqBody ) )
metadata , _ := reqBody [ "metadata" ] . ( map [ string ] any )
assert . Equal ( t , "user_12345" , metadata [ "user_id" ] )
w . Header ( ) . Set ( "Content-Type" , "application/json" )
json . NewEncoder ( w ) . Encode ( map [ string ] any {
require . NoError ( t , json. NewEncoder ( w ) . Encode ( map [ string ] any {
"id" : "msg_e2e_026" , "type" : "message" , "role" : "assistant" ,
"content" : [ ] map [ string ] any { { "type" : "text" , "text" : "你好!" } } ,
"model" : "claude-opus-4-7" , "stop_reason" : "end_turn" , "stop_sequence" : nil ,
"model" : "claude-opus-4-7" , "stop_reason" : "end_turn" , "stop_sequence" : nil ,
"usage" : map [ string ] any { "input_tokens" : 12 , "output_tokens" : 5 } ,
} )
} ) )
} )
e2eCreateProviderAndModel ( t , r , "anthropic_p" , "anthropic" , "claude-opus-4-7" , upstream . URL )
@@ -814,21 +821,21 @@ func TestE2E_Anthropic_NonStream_UsageWithCache(t *testing.T) {
r , upstream := setupE2ETest ( t )
upstream . Config . Handler = http . HandlerFunc ( func ( w http . ResponseWriter , req * http . Request ) {
w . Header ( ) . Set ( "Content-Type" , "application/json" )
json . NewEncoder ( w ) . Encode ( map [ string ] any {
require . NoError ( t , json. NewEncoder ( w ) . Encode ( map [ string ] any {
"id" : "msg_e2e_025" , "type" : "message" , "role" : "assistant" ,
"content" : [ ] map [ string ] any { { "type" : "text" , "text" : "你好!" } } ,
"model" : "claude-opus-4-7" , "stop_reason" : "end_turn" , "stop_sequence" : nil ,
"model" : "claude-opus-4-7" , "stop_reason" : "end_turn" , "stop_sequence" : nil ,
"usage" : map [ string ] any {
"input_tokens" : 25 , "output_tokens" : 5 ,
"cache_creation_input_tokens" : 15 , "cache_read_input_tokens" : 0 ,
} ,
} )
} ) )
} )
e2eCreateProviderAndModel ( t , r , "anthropic_p" , "anthropic" , "claude-opus-4-7" , upstream . URL )
body , _ := json . Marshal ( map [ string ] any {
"model" : "anthropic_p/claude-opus-4-7" , "max_tokens" : 1024 ,
"system" : [ ] map [ string ] any { { "type" : "text" , "text" : "你是编程助手。" } } ,
"system" : [ ] map [ string ] any { { "type" : "text" , "text" : "你是编程助手。" } } ,
"messages" : [ ] map [ string ] any { { "role" : "user" , "content" : "你好" } } ,
} )
w := httptest . NewRecorder ( )
@@ -864,7 +871,8 @@ func TestE2E_Anthropic_Stream_Text(t *testing.T) {
"event: message_stop\ndata: {\"type\":\"message_stop\"}\n\n" ,
}
for _ , e := range events {
w . Write ( [ ] byte ( e ) )
_ , err := w. Write ( [ ] byte ( e ) )
require . NoError ( t , err )
flusher . Flush ( )
time . Sleep ( 10 * time . Millisecond )
}
@@ -874,7 +882,7 @@ func TestE2E_Anthropic_Stream_Text(t *testing.T) {
body , _ := json . Marshal ( map [ string ] any {
"model" : "anthropic_p/claude-opus-4-7" , "max_tokens" : 1024 ,
"messages" : [ ] map [ string ] any { { "role" : "user" , "content" : "你好" } } ,
"stream" : true ,
"stream" : true ,
} )
w := httptest . NewRecorder ( )
req := httptest . NewRequest ( "POST" , "/anthropic/v1/messages" , bytes . NewReader ( body ) )
@@ -922,7 +930,7 @@ func TestE2E_Anthropic_Stream_Thinking(t *testing.T) {
"model" : "anthropic_p/claude-opus-4-7" , "max_tokens" : 4096 ,
"messages" : [ ] map [ string ] any { { "role" : "user" , "content" : "1+1=?" } } ,
"thinking" : map [ string ] any { "type" : "enabled" , "budget_tokens" : 1024 } ,
"stream" : true ,
"stream" : true ,
} )
w := httptest . NewRecorder ( )
req := httptest . NewRequest ( "POST" , "/anthropic/v1/messages" , bytes . NewReader ( body ) )
@@ -961,14 +969,14 @@ func TestE2E_CrossProtocol_OpenAIToAnthropic_RequestFormat(t *testing.T) {
json . NewEncoder ( w ) . Encode ( map [ string ] any {
"id" : "msg_cross_001" , "type" : "message" , "role" : "assistant" ,
"content" : [ ] map [ string ] any { { "type" : "text" , "text" : "跨协议响应" } } ,
"model" : "claude-model" , "stop_reason" : "end_turn" , "stop_sequence" : nil ,
"model" : "claude-model" , "stop_reason" : "end_turn" , "stop_sequence" : nil ,
"usage" : map [ string ] any { "input_tokens" : 10 , "output_tokens" : 5 } ,
} )
} )
e2eCreateProviderAndModel ( t , r , "anthropic_p" , "anthropic" , "claude-model" , upstream . URL )
body , _ := json . Marshal ( map [ string ] any {
"model" : "anthropic_p/claude-model" ,
"model" : "anthropic_p/claude-model" ,
"messages" : [ ] map [ string ] any { { "role" : "user" , "content" : "Hello" } } ,
} )
w := httptest . NewRecorder ( )
@@ -1050,9 +1058,9 @@ func TestE2E_CrossProtocol_OpenAIToAnthropic_Stream(t *testing.T) {
e2eCreateProviderAndModel ( t , r , "anthropic_p" , "anthropic" , "claude-model" , upstream . URL )
body , _ := json . Marshal ( map [ string ] any {
"model" : "anthropic_p/claude-model" ,
"model" : "anthropic_p/claude-model" ,
"messages" : [ ] map [ string ] any { { "role" : "user" , "content" : "Hello" } } ,
"stream" : true ,
"stream" : true ,
} )
w := httptest . NewRecorder ( )
req := httptest . NewRequest ( "POST" , "/openai/chat/completions" , bytes . NewReader ( body ) )
@@ -1092,7 +1100,7 @@ func TestE2E_CrossProtocol_AnthropicToOpenAI_Stream(t *testing.T) {
body , _ := json . Marshal ( map [ string ] any {
"model" : "openai_p/gpt-4" , "max_tokens" : 1024 ,
"messages" : [ ] map [ string ] any { { "role" : "user" , "content" : "Hello" } } ,
"stream" : true ,
"stream" : true ,
} )
w := httptest . NewRecorder ( )
req := httptest . NewRequest ( "POST" , "/anthropic/v1/messages" , bytes . NewReader ( body ) )
@@ -1128,7 +1136,7 @@ func TestE2E_OpenAI_ErrorResponse(t *testing.T) {
e2eCreateProviderAndModel ( t , r , "openai_p" , "openai" , "nonexistent" , upstream . URL )
body , _ := json . Marshal ( map [ string ] any {
"model" : "openai_p/nonexistent" ,
"model" : "openai_p/nonexistent" ,
"messages" : [ ] map [ string ] any { { "role" : "user" , "content" : "test" } } ,
} )
w := httptest . NewRecorder ( )
@@ -1183,11 +1191,11 @@ func TestE2E_OpenAI_NonStream_ParallelToolCalls(t *testing.T) {
"content" : nil ,
"tool_calls" : [ ] map [ string ] any {
{
"id" : "call_ptc_1" , "type" : "function" ,
"id" : "call_ptc_1" , "type" : "function" ,
"function" : map [ string ] any { "name" : "get_weather" , "arguments" : ` { "city":"北京"} ` } ,
} ,
{
"id" : "call_ptc_2" , "type" : "function" ,
"id" : "call_ptc_2" , "type" : "function" ,
"function" : map [ string ] any { "name" : "get_weather" , "arguments" : ` { "city":"上海"} ` } ,
} ,
} ,
@@ -1201,7 +1209,7 @@ func TestE2E_OpenAI_NonStream_ParallelToolCalls(t *testing.T) {
e2eCreateProviderAndModel ( t , r , "openai_p" , "openai" , "gpt-4o" , upstream . URL )
body , _ := json . Marshal ( map [ string ] any {
"model" : "openai_p/gpt-4o" ,
"model" : "openai_p/gpt-4o" ,
"messages" : [ ] map [ string ] any { { "role" : "user" , "content" : "北京和上海的天气" } } ,
"tools" : [ ] map [ string ] any { {
"type" : "function" ,
@@ -1242,10 +1250,10 @@ func TestE2E_OpenAI_NonStream_StopSequence(t *testing.T) {
json . NewEncoder ( w ) . Encode ( map [ string ] any {
"id" : "chatcmpl-e2e-stop" , "object" : "chat.completion" , "created" : 1700000060 , "model" : "gpt-4o" ,
"choices" : [ ] map [ string ] any { {
"index" : 0 ,
"message" : map [ string ] any { "role" : "assistant" , "content" : "1, 2, 3, 4, " } ,
"finish_reason" : "stop" ,
"logprobs" : nil ,
"index" : 0 ,
"message" : map [ string ] any { "role" : "assistant" , "content" : "1, 2, 3, 4, " } ,
"finish_reason" : "stop" ,
"logprobs" : nil ,
} } ,
"usage" : map [ string ] any { "prompt_tokens" : 10 , "completion_tokens" : 8 , "total_tokens" : 18 } ,
} )
@@ -1253,9 +1261,9 @@ func TestE2E_OpenAI_NonStream_StopSequence(t *testing.T) {
e2eCreateProviderAndModel ( t , r , "openai_p" , "openai" , "gpt-4o" , upstream . URL )
body , _ := json . Marshal ( map [ string ] any {
"model" : "openai_p/gpt-4o" ,
"model" : "openai_p/gpt-4o" ,
"messages" : [ ] map [ string ] any { { "role" : "user" , "content" : "从1数到10" } } ,
"stop" : [ ] string { "5" } ,
"stop" : [ ] string { "5" } ,
} )
w := httptest . NewRecorder ( )
req := httptest . NewRequest ( "POST" , "/openai/chat/completions" , bytes . NewReader ( body ) )
@@ -1291,7 +1299,7 @@ func TestE2E_OpenAI_NonStream_ContentFilter(t *testing.T) {
e2eCreateProviderAndModel ( t , r , "openai_p" , "openai" , "gpt-4o" , upstream . URL )
body , _ := json . Marshal ( map [ string ] any {
"model" : "openai_p/gpt-4o" ,
"model" : "openai_p/gpt-4o" ,
"messages" : [ ] map [ string ] any { { "role" : "user" , "content" : "危险内容" } } ,
} )
w := httptest . NewRecorder ( )
@@ -1353,21 +1361,22 @@ func TestE2E_Anthropic_NonStream_MultiToolUse(t *testing.T) {
func TestE2E_Anthropic_NonStream_ToolChoiceAny ( t * testing . T ) {
r , upstream := setupE2ETest ( t )
upstream . Config . Handler = http . HandlerFunc ( func ( w http . ResponseWriter , req * http . Request ) {
body , _ := io . ReadAll ( req . Body )
body , err := io . ReadAll ( req . Body )
require . NoError ( t , err )
var reqBody map [ string ] any
json . Unmarshal ( body , & reqBody )
require . NoError ( t , json. Unmarshal ( body , & reqBody ) )
tc , _ := reqBody [ "tool_choice" ] . ( map [ string ] any )
assert . Equal ( t , "any" , tc [ "type" ] )
w . Header ( ) . Set ( "Content-Type" , "application/json" )
json . NewEncoder ( w ) . Encode ( map [ string ] any {
require . NoError ( t , json. NewEncoder ( w ) . Encode ( map [ string ] any {
"id" : "msg_e2e_tca" , "type" : "message" , "role" : "assistant" ,
"content" : [ ] map [ string ] any {
{ "type" : "tool_use" , "id" : "toolu_tca_1" , "name" : "get_time" , "input" : map [ string ] any { "timezone" : "Asia/Shanghai" } } ,
} ,
"model" : "claude-opus-4-7" , "stop_reason" : "tool_use" , "stop_sequence" : nil ,
"usage" : map [ string ] any { "input_tokens" : 100 , "output_tokens" : 30 } ,
} )
} ) )
} )
e2eCreateProviderAndModel ( t , r , "anthropic_p" , "anthropic" , "claude-opus-4-7" , upstream . URL )
@@ -1397,20 +1406,21 @@ func TestE2E_Anthropic_NonStream_ToolChoiceAny(t *testing.T) {
func TestE2E_Anthropic_NonStream_ArraySystemPrompt ( t * testing . T ) {
r , upstream := setupE2ETest ( t )
upstream . Config . Handler = http . HandlerFunc ( func ( w http . ResponseWriter , req * http . Request ) {
body , _ := io . ReadAll ( req . Body )
body , err := io . ReadAll ( req . Body )
require . NoError ( t , err )
var reqBody map [ string ] any
json . Unmarshal ( body , & reqBody )
require . NoError ( t , json. Unmarshal ( body , & reqBody ) )
sys , ok := reqBody [ "system" ] . ( [ ] any )
require . True ( t , ok , "system should be an array" )
require . GreaterOrEqual ( t , len ( sys ) , 1 )
w . Header ( ) . Set ( "Content-Type" , "application/json" )
json . NewEncoder ( w ) . Encode ( map [ string ] any {
require . NoError ( t , json. NewEncoder ( w ) . Encode ( map [ string ] any {
"id" : "msg_e2e_asys" , "type" : "message" , "role" : "assistant" ,
"content" : [ ] map [ string ] any { { "type" : "text" , "text" : "已收到多条系统指令。" } } ,
"model" : "claude-opus-4-7" , "stop_reason" : "end_turn" , "stop_sequence" : nil ,
"model" : "claude-opus-4-7" , "stop_reason" : "end_turn" , "stop_sequence" : nil ,
"usage" : map [ string ] any { "input_tokens" : 50 , "output_tokens" : 10 } ,
} )
} ) )
} )
e2eCreateProviderAndModel ( t , r , "anthropic_p" , "anthropic" , "claude-opus-4-7" , upstream . URL )
@@ -1433,21 +1443,22 @@ func TestE2E_Anthropic_NonStream_ArraySystemPrompt(t *testing.T) {
func TestE2E_Anthropic_NonStream_ToolResultMessage ( t * testing . T ) {
r , upstream := setupE2ETest ( t )
upstream . Config . Handler = http . HandlerFunc ( func ( w http . ResponseWriter , req * http . Request ) {
body , _ := io . ReadAll ( req . Body )
body , err := io . ReadAll ( req . Body )
require . NoError ( t , err )
var reqBody map [ string ] any
json . Unmarshal ( body , & reqBody )
require . NoError ( t , json. Unmarshal ( body , & reqBody ) )
msgs := reqBody [ "messages" ] . ( [ ] any )
require . GreaterOrEqual ( t , len ( msgs ) , 3 )
lastMsg := msgs [ len ( msgs ) - 1 ] . ( map [ string ] any )
assert . Equal ( t , "user" , lastMsg [ "role" ] )
w . Header ( ) . Set ( "Content-Type" , "application/json" )
json . NewEncoder ( w ) . Encode ( map [ string ] any {
require . NoError ( t , json. NewEncoder ( w ) . Encode ( map [ string ] any {
"id" : "msg_e2e_tr" , "type" : "message" , "role" : "assistant" ,
"content" : [ ] map [ string ] any { { "type" : "text" , "text" : "北京当前晴天, 温度25°C。" } } ,
"model" : "claude-opus-4-7" , "stop_reason" : "end_turn" , "stop_sequence" : nil ,
"model" : "claude-opus-4-7" , "stop_reason" : "end_turn" , "stop_sequence" : nil ,
"usage" : map [ string ] any { "input_tokens" : 150 , "output_tokens" : 20 } ,
} )
} ) )
} )
e2eCreateProviderAndModel ( t , r , "anthropic_p" , "anthropic" , "claude-opus-4-7" , upstream . URL )
@@ -1497,7 +1508,8 @@ func TestE2E_Anthropic_Stream_ToolCalls(t *testing.T) {
"event: message_stop\ndata: {\"type\":\"message_stop\"}\n\n" ,
}
for _ , e := range events {
w . Write ( [ ] byte ( e ) )
_ , err := w. Write ( [ ] byte ( e ) )
require . NoError ( t , err )
flusher . Flush ( )
time . Sleep ( 10 * time . Millisecond )
}
@@ -1559,7 +1571,7 @@ func TestE2E_CrossProtocol_OpenAIToAnthropic_NonStream_ToolCalls(t *testing.T) {
e2eCreateProviderAndModel ( t , r , "anthropic_p" , "anthropic" , "claude-model" , upstream . URL )
body , _ := json . Marshal ( map [ string ] any {
"model" : "anthropic_p/claude-model" ,
"model" : "anthropic_p/claude-model" ,
"messages" : [ ] map [ string ] any { { "role" : "user" , "content" : "北京天气" } } ,
"tools" : [ ] map [ string ] any { {
"type" : "function" ,
@@ -1634,14 +1646,14 @@ func TestE2E_CrossProtocol_StopReasonMapping(t *testing.T) {
json . NewEncoder ( w ) . Encode ( map [ string ] any {
"id" : "msg_cross_stop" , "type" : "message" , "role" : "assistant" ,
"content" : [ ] map [ string ] any { { "type" : "text" , "text" : "被截断的内容..." } } ,
"model" : "claude-model" , "stop_reason" : "max_tokens" , "stop_sequence" : nil ,
"model" : "claude-model" , "stop_reason" : "max_tokens" , "stop_sequence" : nil ,
"usage" : map [ string ] any { "input_tokens" : 10 , "output_tokens" : 20 } ,
} )
} )
e2eCreateProviderAndModel ( t , r , "anthropic_p" , "anthropic" , "claude-model" , upstream . URL )
body , _ := json . Marshal ( map [ string ] any {
"model" : "anthropic_p/claude-model" ,
"model" : "anthropic_p/claude-model" ,
"messages" : [ ] map [ string ] any { { "role" : "user" , "content" : "长文" } } ,
} )
w := httptest . NewRecorder ( )
@@ -1659,9 +1671,10 @@ func TestE2E_CrossProtocol_StopReasonMapping(t *testing.T) {
func TestE2E_OpenAI_NonStream_AssistantWithToolResult ( t * testing . T ) {
r , upstream := setupE2ETest ( t )
upstream . Config . Handler = http . HandlerFunc ( func ( w http . ResponseWriter , req * http . Request ) {
body , _ := io . ReadAll ( req . Body )
body , err := io . ReadAll ( req . Body )
require . NoError ( t , err )
var reqBody map [ string ] any
json . Unmarshal ( body , & reqBody )
require . NoError ( t , json. Unmarshal ( body , & reqBody ) )
msgs := reqBody [ "messages" ] . ( [ ] any )
require . GreaterOrEqual ( t , len ( msgs ) , 3 )
toolMsg := msgs [ 2 ] . ( map [ string ] any )
@@ -1669,16 +1682,16 @@ func TestE2E_OpenAI_NonStream_AssistantWithToolResult(t *testing.T) {
assert . Equal ( t , "call_e2e_001" , toolMsg [ "tool_call_id" ] )
w . Header ( ) . Set ( "Content-Type" , "application/json" )
json . NewEncoder ( w ) . Encode ( map [ string ] any {
require . NoError ( t , json. NewEncoder ( w ) . Encode ( map [ string ] any {
"id" : "chatcmpl-e2e-tr" , "object" : "chat.completion" , "created" : 1700000080 , "model" : "gpt-4o" ,
"choices" : [ ] map [ string ] any { {
"index" : 0 ,
"message" : map [ string ] any { "role" : "assistant" , "content" : "北京当前晴天, 温度25°C。" } ,
"index" : 0 ,
"message" : map [ string ] any { "role" : "assistant" , "content" : "北京当前晴天, 温度25°C。" } ,
"finish_reason" : "stop" ,
"logprobs" : nil ,
} } ,
"usage" : map [ string ] any { "prompt_tokens" : 100 , "completion_tokens" : 20 , "total_tokens" : 120 } ,
} )
} ) )
} )
e2eCreateProviderAndModel ( t , r , "openai_p" , "openai" , "gpt-4o" , upstream . URL )
@@ -1722,7 +1735,8 @@ func TestE2E_CrossProtocol_AnthropicToOpenAI_Stream_ToolCalls(t *testing.T) {
"event: message_stop\ndata: {\"type\":\"message_stop\"}\n\n" ,
}
for _ , e := range events {
w . Write ( [ ] byte ( e ) )
_ , err := w. Write ( [ ] byte ( e ) )
require . NoError ( t , err )
flusher . Flush ( )
time . Sleep ( 10 * time . Millisecond )
}
@@ -1730,7 +1744,7 @@ func TestE2E_CrossProtocol_AnthropicToOpenAI_Stream_ToolCalls(t *testing.T) {
e2eCreateProviderAndModel ( t , r , "anthropic_p" , "anthropic" , "claude-model" , upstream . URL )
body , _ := json . Marshal ( map [ string ] any {
"model" : "anthropic_p/claude-model" ,
"model" : "anthropic_p/claude-model" ,
"messages" : [ ] map [ string ] any { { "role" : "user" , "content" : "北京天气" } } ,
"tools" : [ ] map [ string ] any { {
"type" : "function" ,
@@ -1817,7 +1831,7 @@ func TestE2E_OpenAI_Upstream5xx_ErrorPassthrough(t *testing.T) {
e2eCreateProviderAndModel ( t , r , "openai_p" , "openai" , "gpt-4o" , upstream . URL )
body , _ := json . Marshal ( map [ string ] any {
"model" : "openai_p/gpt-4o" ,
"model" : "openai_p/gpt-4o" ,
"messages" : [ ] map [ string ] any { { "role" : "user" , "content" : "test" } } ,
} )
w := httptest . NewRecorder ( )
@@ -1879,7 +1893,8 @@ func TestE2E_Anthropic_Stream_TruncatedSSE(t *testing.T) {
"event: content_block_delta\ndata: {\"type\":\"content_block_delta\",\"index\":0,\"delta\":{\"type\":\"text_delta\",\"text\":\"正常\"}}\n\n" ,
}
for _ , e := range events {
w . Write ( [ ] byte ( e ) )
_ , err := w. Write ( [ ] byte ( e ) )
require . NoError ( t , err )
flusher . Flush ( )
time . Sleep ( 10 * time . Millisecond )
}
@@ -1889,7 +1904,7 @@ func TestE2E_Anthropic_Stream_TruncatedSSE(t *testing.T) {
body , _ := json . Marshal ( map [ string ] any {
"model" : "anthropic_p/claude-opus-4-7" , "max_tokens" : 1024 ,
"messages" : [ ] map [ string ] any { { "role" : "user" , "content" : "test" } } ,
"stream" : true ,
"stream" : true ,
} )
w := httptest . NewRecorder ( )
req := httptest . NewRequest ( "POST" , "/anthropic/v1/messages" , bytes . NewReader ( body ) )
@@ -1902,5 +1917,7 @@ func TestE2E_Anthropic_Stream_TruncatedSSE(t *testing.T) {
assert . Contains ( t , respBody , "正常" )
}
// Blank assignments that reference fmt and time so the imports stay used
// even if no test in this file currently calls them directly; this avoids
// an "imported and not used" compile error. Grouped per Go convention.
// (The diff previously contained both the ungrouped and grouped forms of
// this declaration; only the grouped form is kept.)
var (
	_ = fmt.Sprintf
	_ = time.Now
)