1
0

refactor: 实现 ConversionEngine 协议转换引擎,替代旧 protocol 包

- 新增 ConversionEngine 核心引擎,支持 OpenAI 和 Anthropic 协议转换
- 添加 stream decoder/encoder 实现
- 更新 provider client 支持新引擎
- 补充单元测试和集成测试
- 更新 specs 文档
This commit is contained in:
2026-04-20 13:01:05 +08:00
parent 1dac347d3b
commit bc1ee612d9
39 changed files with 11177 additions and 995 deletions

View File

@@ -251,7 +251,7 @@ func (d *StreamDecoder) processMessageDelta(data []byte) []canonical.CanonicalSt
}
if d.accumulatedUsage != nil {
d.accumulatedUsage.OutputTokens += raw.Usage.OutputTokens
d.accumulatedUsage.OutputTokens = raw.Usage.OutputTokens
}
return []canonical.CanonicalStreamEvent{

View File

@@ -272,3 +272,218 @@ func TestStreamDecoder_RedactedDeltaSuppressed(t *testing.T) {
events := d.ProcessChunk(raw)
assert.Empty(t, events)
}
// TestStreamDecoder_ServerToolUse_Suppressed verifies that a
// content_block_start carrying a server_tool_use block emits no canonical
// events and marks that block index as redacted.
func TestStreamDecoder_ServerToolUse_Suppressed(t *testing.T) {
	dec := NewStreamDecoder()
	block := map[string]any{
		"type": "server_tool_use",
		"id":   "server_tool_1",
		"name": "web_search",
	}
	chunk := makeAnthropicEvent("content_block_start", map[string]any{
		"type":          "content_block_start",
		"index":         2,
		"content_block": block,
	})
	out := dec.ProcessChunk(chunk)
	assert.Empty(t, out)
	assert.True(t, dec.redactedBlocks[2])
}
// TestStreamDecoder_WebSearchToolResult_Suppressed verifies that
// web_search_tool_result blocks are dropped and their index is tracked
// as redacted.
func TestStreamDecoder_WebSearchToolResult_Suppressed(t *testing.T) {
	dec := NewStreamDecoder()
	chunk := makeAnthropicEvent("content_block_start", map[string]any{
		"type":  "content_block_start",
		"index": 3,
		"content_block": map[string]any{
			"type":        "web_search_tool_result",
			"tool_use_id": "search_1",
		},
	})
	out := dec.ProcessChunk(chunk)
	assert.Empty(t, out)
	assert.True(t, dec.redactedBlocks[3])
}
// TestStreamDecoder_CodeExecutionToolResult_Suppressed verifies that
// code_execution_tool_result blocks are suppressed like other
// server-side tool blocks.
func TestStreamDecoder_CodeExecutionToolResult_Suppressed(t *testing.T) {
	dec := NewStreamDecoder()
	chunk := makeAnthropicEvent("content_block_start", map[string]any{
		"type":  "content_block_start",
		"index": 4,
		"content_block": map[string]any{
			"type": "code_execution_tool_result",
		},
	})
	out := dec.ProcessChunk(chunk)
	assert.Empty(t, out)
	assert.True(t, dec.redactedBlocks[4])
}
// TestStreamDecoder_CitationsDelta_Discarded verifies that a
// citations_delta inside content_block_delta produces no events.
func TestStreamDecoder_CitationsDelta_Discarded(t *testing.T) {
	dec := NewStreamDecoder()
	delta := map[string]any{
		"type":     "citations_delta",
		"citation": map[string]any{"title": "ref1"},
	}
	chunk := makeAnthropicEvent("content_block_delta", map[string]any{
		"type":  "content_block_delta",
		"index": 0,
		"delta": delta,
	})
	assert.Empty(t, dec.ProcessChunk(chunk))
}
// TestStreamDecoder_SignatureDelta_Discarded verifies that a
// signature_delta inside content_block_delta produces no events.
func TestStreamDecoder_SignatureDelta_Discarded(t *testing.T) {
	dec := NewStreamDecoder()
	delta := map[string]any{
		"type":      "signature_delta",
		"signature": "sig_123",
	}
	chunk := makeAnthropicEvent("content_block_delta", map[string]any{
		"type":  "content_block_delta",
		"index": 0,
		"delta": delta,
	})
	assert.Empty(t, dec.ProcessChunk(chunk))
}
// TestStreamDecoder_UnknownEventType verifies that an unrecognized SSE
// event type is silently ignored.
func TestStreamDecoder_UnknownEventType(t *testing.T) {
	dec := NewStreamDecoder()
	chunk := makeAnthropicEvent("unknown_event", map[string]any{"type": "unknown_event"})
	assert.Empty(t, dec.ProcessChunk(chunk))
}
// TestStreamDecoder_InvalidJSON verifies that a message_start event with
// an unparseable data payload yields no events (it is not an error event,
// so nothing is surfaced).
func TestStreamDecoder_InvalidJSON(t *testing.T) {
	dec := NewStreamDecoder()
	out := dec.ProcessChunk([]byte("event: message_start\ndata: {invalid}\n\n"))
	assert.Empty(t, out)
}
// TestStreamDecoder_MultipleEventsInSingleChunk feeds three SSE events in
// one chunk and expects three canonical events in the same order.
func TestStreamDecoder_MultipleEventsInSingleChunk(t *testing.T) {
	dec := NewStreamDecoder()
	events := []struct {
		name    string
		payload map[string]any
	}{
		{"message_start", map[string]any{
			"type": "message_start",
			"message": map[string]any{
				"id":    "msg_multi",
				"model": "claude-3",
			},
		}},
		{"content_block_delta", map[string]any{
			"type":  "content_block_delta",
			"index": 0,
			"delta": map[string]any{
				"type": "text_delta",
				"text": "Hello",
			},
		}},
		{"message_stop", map[string]any{"type": "message_stop"}},
	}
	var chunk []byte
	for _, ev := range events {
		chunk = append(chunk, makeAnthropicEvent(ev.name, ev.payload)...)
	}
	out := dec.ProcessChunk(chunk)
	require.Len(t, out, 3)
	assert.Equal(t, canonical.EventMessageStart, out[0].Type)
	assert.Equal(t, canonical.EventContentBlockDelta, out[1].Type)
	assert.Equal(t, canonical.EventMessageStop, out[2].Type)
}
// TestStreamDecoder_ErrorInvalidJSON verifies that an error event with an
// unparseable payload is surfaced as a canonical error event whose message
// flags the parse failure.
func TestStreamDecoder_ErrorInvalidJSON(t *testing.T) {
	dec := NewStreamDecoder()
	out := dec.ProcessChunk([]byte("event: error\ndata: {invalid}\n\n"))
	require.Len(t, out, 1)
	assert.Equal(t, canonical.EventError, out[0].Type)
	assert.Contains(t, out[0].Error.Message, "解析错误事件失败")
}
// TestStreamDecoder_MessageStartWithUsage verifies that input-token usage
// carried on message_start is propagated onto the canonical event.
func TestStreamDecoder_MessageStartWithUsage(t *testing.T) {
	dec := NewStreamDecoder()
	chunk := makeAnthropicEvent("message_start", map[string]any{
		"type": "message_start",
		"message": map[string]any{
			"id":    "msg_usage",
			"model": "claude-3",
			"usage": map[string]any{"input_tokens": 25, "output_tokens": 0},
		},
	})
	out := dec.ProcessChunk(chunk)
	require.Len(t, out, 1)
	assert.Equal(t, canonical.EventMessageStart, out[0].Type)
	require.NotNil(t, out[0].Message.Usage)
	assert.Equal(t, 25, out[0].Message.Usage.InputTokens)
}
// TestStreamDecoder_ThinkingBlockStart verifies that a thinking content
// block is forwarded (not suppressed) with its type preserved.
func TestStreamDecoder_ThinkingBlockStart(t *testing.T) {
	dec := NewStreamDecoder()
	chunk := makeAnthropicEvent("content_block_start", map[string]any{
		"type":  "content_block_start",
		"index": 0,
		"content_block": map[string]any{
			"type":     "thinking",
			"thinking": "",
		},
	})
	out := dec.ProcessChunk(chunk)
	require.Len(t, out, 1)
	assert.Equal(t, canonical.EventContentBlockStart, out[0].Type)
	require.NotNil(t, out[0].ContentBlock)
	assert.Equal(t, "thinking", out[0].ContentBlock.Type)
}
// TestStreamDecoder_MessageDelta_UsageNotAccumulated verifies that
// output_tokens reported by successive message_delta events replaces the
// previous value rather than being summed (each delta already carries a
// cumulative count).
func TestStreamDecoder_MessageDelta_UsageNotAccumulated(t *testing.T) {
	dec := NewStreamDecoder()
	dec.ProcessChunk(makeAnthropicEvent("message_start", map[string]any{
		"type": "message_start",
		"message": map[string]any{
			"id":    "msg_usage_test",
			"model": "claude-3",
			"usage": map[string]any{"input_tokens": 10, "output_tokens": 0},
		},
	}))
	// mkDelta builds a message_delta event reporting the given output_tokens.
	mkDelta := func(outputTokens int) []byte {
		return makeAnthropicEvent("message_delta", map[string]any{
			"type":  "message_delta",
			"delta": map[string]any{"stop_reason": "end_turn"},
			"usage": map[string]any{"output_tokens": outputTokens},
		})
	}
	out := dec.ProcessChunk(mkDelta(25))
	require.Len(t, out, 1)
	assert.Equal(t, 25, out[0].Usage.OutputTokens)
	out = dec.ProcessChunk(mkDelta(30))
	require.Len(t, out, 1)
	assert.Equal(t, 30, out[0].Usage.OutputTokens, "output_tokens should be replaced, not accumulated")
	assert.Equal(t, 30, dec.accumulatedUsage.OutputTokens, "accumulated usage should match last value")
}

View File

@@ -0,0 +1,477 @@
package anthropic
import (
"encoding/json"
"testing"
"time"
"nex/backend/internal/conversion"
"nex/backend/internal/conversion/canonical"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
)
// TestDecodeTools verifies that tool definitions decode from the request
// body, including a tool that omits the optional description field.
func TestDecodeTools(t *testing.T) {
d := NewStreamDecoder()
body := []byte(`{
"model": "claude-3",
"max_tokens": 1024,
"messages": [{"role": "user", "content": "hi"}],
"tools": [
{"name": "search", "description": "Search", "input_schema": {"type":"object"}},
{"name": "calc", "input_schema": {"type":"object"}}
]
}`)
req, err := decodeRequest(body)
require.NoError(t, err)
assert.Len(t, req.Tools, 2)
assert.Equal(t, "search", req.Tools[0].Name)
assert.Equal(t, "Search", req.Tools[0].Description)
assert.Equal(t, "calc", req.Tools[1].Name)
}
// TestDecodeToolChoice covers tool_choice decoding in both wire shapes:
// a bare string ("auto"/"none"/"any") and an object form, with and
// without a named tool.
func TestDecodeToolChoice(t *testing.T) {
tests := []struct {
name string
jsonBody string
wantType string
wantName string
}{
{
"auto string",
`{"model":"claude-3","max_tokens":1024,"messages":[{"role":"user","content":"hi"}],"tool_choice":"auto"}`,
"auto", "",
},
{
"none string",
`{"model":"claude-3","max_tokens":1024,"messages":[{"role":"user","content":"hi"}],"tool_choice":"none"}`,
"none", "",
},
{
"any string",
`{"model":"claude-3","max_tokens":1024,"messages":[{"role":"user","content":"hi"}],"tool_choice":"any"}`,
"any", "",
},
{
"tool object",
`{"model":"claude-3","max_tokens":1024,"messages":[{"role":"user","content":"hi"}],"tool_choice":{"type":"tool","name":"search"}}`,
"tool", "search",
},
{
"auto object",
`{"model":"claude-3","max_tokens":1024,"messages":[{"role":"user","content":"hi"}],"tool_choice":{"type":"auto"}}`,
"auto", "",
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
req, err := decodeRequest([]byte(tt.jsonBody))
require.NoError(t, err)
require.NotNil(t, req.ToolChoice)
assert.Equal(t, tt.wantType, req.ToolChoice.Type)
assert.Equal(t, tt.wantName, req.ToolChoice.Name)
})
}
}
// TestDecodeParameters_TopK verifies that top_k and stop_sequences are
// decoded into the request parameters.
func TestDecodeParameters_TopK(t *testing.T) {
topK := 10
body := []byte(`{
"model": "claude-3",
"max_tokens": 1024,
"messages": [{"role": "user", "content": "hi"}],
"top_k": 10,
"stop_sequences": ["STOP"]
}`)
req, err := decodeRequest(body)
require.NoError(t, err)
require.NotNil(t, req.Parameters.TopK)
assert.Equal(t, topK, *req.Parameters.TopK)
assert.Equal(t, []string{"STOP"}, req.Parameters.StopSequences)
}
// TestDecodeRequest_MetadataUserID verifies that metadata.user_id is
// lifted onto the canonical request's UserID field.
func TestDecodeRequest_MetadataUserID(t *testing.T) {
body := []byte(`{
"model": "claude-3",
"max_tokens": 1024,
"messages": [{"role": "user", "content": "hi"}],
"metadata": {"user_id": "user-123"}
}`)
req, err := decodeRequest(body)
require.NoError(t, err)
assert.Equal(t, "user-123", req.UserID)
}
// TestDecodeSystem_Empty verifies that an empty system string decodes to
// a nil System field (treated the same as absent).
func TestDecodeSystem_Empty(t *testing.T) {
body := []byte(`{
"model": "claude-3",
"max_tokens": 1024,
"system": "",
"messages": [{"role": "user", "content": "hi"}]
}`)
req, err := decodeRequest(body)
require.NoError(t, err)
assert.Nil(t, req.System)
}
// TestDecodeSystem_Nil verifies that an absent system field decodes to a
// nil System field.
func TestDecodeSystem_Nil(t *testing.T) {
body := []byte(`{
"model": "claude-3",
"max_tokens": 1024,
"messages": [{"role": "user", "content": "hi"}]
}`)
req, err := decodeRequest(body)
require.NoError(t, err)
assert.Nil(t, req.System)
}
// TestDecodeThinking_WithEffort verifies that the thinking config is
// decoded and that output_config.effort is merged onto it.
func TestDecodeThinking_WithEffort(t *testing.T) {
body := []byte(`{
"model": "claude-3",
"max_tokens": 1024,
"messages": [{"role": "user", "content": "hi"}],
"thinking": {"type": "enabled", "budget_tokens": 5000},
"output_config": {"effort": "high"}
}`)
req, err := decodeRequest(body)
require.NoError(t, err)
require.NotNil(t, req.Thinking)
assert.Equal(t, "enabled", req.Thinking.Type)
assert.Equal(t, "high", req.Thinking.Effort)
}
// TestDecodeOutputFormat_NilOutputConfig verifies that OutputFormat stays
// nil when the request carries no output_config.
func TestDecodeOutputFormat_NilOutputConfig(t *testing.T) {
body := []byte(`{
"model": "claude-3",
"max_tokens": 1024,
"messages": [{"role": "user", "content": "hi"}]
}`)
req, err := decodeRequest(body)
require.NoError(t, err)
assert.Nil(t, req.OutputFormat)
}
// TestDecodeMessage_UserWithOnlyToolResults verifies that a user message
// consisting solely of tool_result blocks is re-roled as a tool message,
// preserving the tool_use_id linkage.
func TestDecodeMessage_UserWithOnlyToolResults(t *testing.T) {
body := []byte(`{
"model": "claude-3",
"max_tokens": 1024,
"messages": [
{"role": "user", "content": "hi"},
{"role": "assistant", "content": [{"type": "tool_use", "id": "t1", "name": "fn", "input": {}}]},
{
"role": "user",
"content": [{"type": "tool_result", "tool_use_id": "t1", "content": "result"}]
}
]
}`)
req, err := decodeRequest(body)
require.NoError(t, err)
lastMsg := req.Messages[len(req.Messages)-1]
assert.Equal(t, canonical.RoleTool, lastMsg.Role)
assert.Equal(t, "t1", lastMsg.Content[0].ToolUseID)
}
// TestDecodeContentBlocks_Nil verifies that a nil content value decodes
// to a single empty text block.
func TestDecodeContentBlocks_Nil(t *testing.T) {
	got := decodeContentBlocks(nil)
	require.Len(t, got, 1)
	assert.Equal(t, "", got[0].Text)
}
// TestDecodeContentBlocks_String verifies that a plain-string content
// value decodes to one text block holding that string.
func TestDecodeContentBlocks_String(t *testing.T) {
	got := decodeContentBlocks("hello")
	require.Len(t, got, 1)
	assert.Equal(t, "hello", got[0].Text)
}
// TestParseTimestamp covers parseTimestamp: a valid RFC3339 string maps to
// its Unix time, while empty or malformed input maps to zero.
func TestParseTimestamp(t *testing.T) {
	cases := []struct {
		name  string
		input string
		want  int64
	}{
		{name: "valid RFC3339", input: "2024-01-15T00:00:00Z", want: time.Date(2024, 1, 15, 0, 0, 0, 0, time.UTC).Unix()},
		{name: "empty", input: "", want: 0},
		{name: "invalid", input: "not-a-date", want: 0},
	}
	for _, tc := range cases {
		t.Run(tc.name, func(t *testing.T) {
			assert.Equal(t, tc.want, parseTimestamp(tc.input))
		})
	}
}
// TestEncodeToolChoice verifies the encoded shape of every canonical
// tool-choice constructor; "name" is only expected for the named form.
func TestEncodeToolChoice(t *testing.T) {
	cases := []struct {
		name   string
		choice *canonical.ToolChoice
		want   map[string]any
	}{
		{name: "auto", choice: canonical.NewToolChoiceAuto(), want: map[string]any{"type": "auto"}},
		{name: "none", choice: canonical.NewToolChoiceNone(), want: map[string]any{"type": "none"}},
		{name: "any", choice: canonical.NewToolChoiceAny(), want: map[string]any{"type": "any"}},
		{name: "tool", choice: canonical.NewToolChoiceNamed("search"), want: map[string]any{"type": "tool", "name": "search"}},
	}
	for _, tc := range cases {
		t.Run(tc.name, func(t *testing.T) {
			encoded, ok := encodeToolChoice(tc.choice).(map[string]any)
			require.True(t, ok)
			assert.Equal(t, tc.want["type"], encoded["type"])
			// For non-named choices want["name"] is nil, matching the
			// zero value of a missing map key.
			assert.Equal(t, tc.want["name"], encoded["name"])
		})
	}
}
// TestEncodeThinkingConfig verifies the "type" field produced for each
// thinking-config variant.
// NOTE(review): the want tables carry budget_tokens but only "type" is
// asserted, so the budget encoding is effectively unchecked — consider
// asserting it once the encoder's numeric representation is pinned down.
func TestEncodeThinkingConfig(t *testing.T) {
	budget := 5000
	cases := []struct {
		name string
		cfg  *canonical.ThinkingConfig
		want map[string]any
	}{
		{name: "enabled", cfg: &canonical.ThinkingConfig{Type: "enabled", BudgetTokens: &budget}, want: map[string]any{"type": "enabled", "budget_tokens": float64(5000)}},
		{name: "disabled", cfg: &canonical.ThinkingConfig{Type: "disabled"}, want: map[string]any{"type": "disabled"}},
		{name: "adaptive", cfg: &canonical.ThinkingConfig{Type: "adaptive"}, want: map[string]any{"type": "adaptive"}},
	}
	for _, tc := range cases {
		t.Run(tc.name, func(t *testing.T) {
			encoded := encodeThinkingConfig(tc.cfg)
			assert.Equal(t, tc.want["type"], encoded["type"])
		})
	}
}
// TestEncodeRequest_PublicFields verifies that UserID is emitted under
// metadata and that ParallelToolUse=false maps to
// disable_parallel_tool_use=true on the wire.
func TestEncodeRequest_PublicFields(t *testing.T) {
	maxTokens := 1024
	parallel := false
	req := &canonical.CanonicalRequest{
		Model:           "claude-3",
		Parameters:      canonical.RequestParameters{MaxTokens: &maxTokens},
		Messages:        []canonical.CanonicalMessage{{Role: canonical.RoleUser, Content: []canonical.ContentBlock{canonical.NewTextBlock("hi")}}},
		UserID:          "user-123",
		ParallelToolUse: &parallel,
	}
	raw, err := encodeRequest(req, conversion.NewTargetProvider("", "key", "model"))
	require.NoError(t, err)
	var got map[string]any
	require.NoError(t, json.Unmarshal(raw, &got))
	assert.Equal(t, map[string]any{"user_id": "user-123"}, got["metadata"])
	assert.Equal(t, true, got["disable_parallel_tool_use"])
}
// TestEncodeRequest_DefaultMaxTokens verifies that a request without
// MaxTokens is encoded with the 4096 default.
func TestEncodeRequest_DefaultMaxTokens(t *testing.T) {
	req := &canonical.CanonicalRequest{
		Model:    "claude-3",
		Messages: []canonical.CanonicalMessage{{Role: canonical.RoleUser, Content: []canonical.ContentBlock{canonical.NewTextBlock("hi")}}},
	}
	raw, err := encodeRequest(req, conversion.NewTargetProvider("", "key", "model"))
	require.NoError(t, err)
	var got map[string]any
	require.NoError(t, json.Unmarshal(raw, &got))
	assert.Equal(t, float64(4096), got["max_tokens"])
}
// TestEncodeRequest_TopK verifies that the TopK parameter is emitted as
// top_k on the wire.
func TestEncodeRequest_TopK(t *testing.T) {
	maxTokens, topK := 1024, 10
	req := &canonical.CanonicalRequest{
		Model:      "claude-3",
		Parameters: canonical.RequestParameters{MaxTokens: &maxTokens, TopK: &topK},
		Messages:   []canonical.CanonicalMessage{{Role: canonical.RoleUser, Content: []canonical.ContentBlock{canonical.NewTextBlock("hi")}}},
	}
	raw, err := encodeRequest(req, conversion.NewTargetProvider("", "key", "model"))
	require.NoError(t, err)
	var got map[string]any
	require.NoError(t, json.Unmarshal(raw, &got))
	assert.Equal(t, float64(10), got["top_k"])
}
// TestEncodeRequest_WithTools verifies that tools and tool_choice are
// carried through to the encoded request body.
func TestEncodeRequest_WithTools(t *testing.T) {
	maxTokens := 1024
	req := &canonical.CanonicalRequest{
		Model:      "claude-3",
		Parameters: canonical.RequestParameters{MaxTokens: &maxTokens},
		Messages:   []canonical.CanonicalMessage{{Role: canonical.RoleUser, Content: []canonical.ContentBlock{canonical.NewTextBlock("hi")}}},
		Tools: []canonical.CanonicalTool{
			{Name: "search", Description: "Search things", InputSchema: json.RawMessage(`{"type":"object"}`)},
		},
		ToolChoice: canonical.NewToolChoiceAuto(),
	}
	raw, err := encodeRequest(req, conversion.NewTargetProvider("", "key", "model"))
	require.NoError(t, err)
	var got map[string]any
	require.NoError(t, json.Unmarshal(raw, &got))
	tools, ok := got["tools"].([]any)
	require.True(t, ok)
	require.Len(t, tools, 1)
	tool, ok := tools[0].(map[string]any)
	require.True(t, ok)
	assert.Equal(t, "search", tool["name"])
	assert.Equal(t, "Search things", tool["description"])
	choice, ok := got["tool_choice"].(map[string]any)
	require.True(t, ok)
	assert.Equal(t, "auto", choice["type"])
}
// TestEncodeRequest_ThinkingWithEffort verifies that a thinking effort
// level is encoded as output_config.effort.
func TestEncodeRequest_ThinkingWithEffort(t *testing.T) {
	maxTokens := 1024
	req := &canonical.CanonicalRequest{
		Model:      "claude-3",
		Parameters: canonical.RequestParameters{MaxTokens: &maxTokens},
		Messages:   []canonical.CanonicalMessage{{Role: canonical.RoleUser, Content: []canonical.ContentBlock{canonical.NewTextBlock("hi")}}},
		Thinking:   &canonical.ThinkingConfig{Type: "enabled", Effort: "high"},
	}
	raw, err := encodeRequest(req, conversion.NewTargetProvider("", "key", "model"))
	require.NoError(t, err)
	var got map[string]any
	require.NoError(t, json.Unmarshal(raw, &got))
	outputConfig, ok := got["output_config"].(map[string]any)
	require.True(t, ok)
	assert.Equal(t, "high", outputConfig["effort"])
}
// TestEncodeResponse_UsageWithCacheAndCreation verifies that cache-read
// and cache-creation token counts are encoded alongside input tokens.
func TestEncodeResponse_UsageWithCacheAndCreation(t *testing.T) {
	cacheRead, cacheCreation := 30, 10
	stop := canonical.StopReasonEndTurn
	resp := &canonical.CanonicalResponse{
		ID:         "msg-1",
		Model:      "claude-3",
		Content:    []canonical.ContentBlock{canonical.NewTextBlock("ok")},
		StopReason: &stop,
		Usage: canonical.CanonicalUsage{
			InputTokens:         100,
			OutputTokens:        50,
			CacheReadTokens:     &cacheRead,
			CacheCreationTokens: &cacheCreation,
		},
	}
	raw, err := encodeResponse(resp)
	require.NoError(t, err)
	var got map[string]any
	require.NoError(t, json.Unmarshal(raw, &got))
	usage, ok := got["usage"].(map[string]any)
	require.True(t, ok)
	assert.Equal(t, float64(100), usage["input_tokens"])
	assert.Equal(t, float64(30), usage["cache_read_input_tokens"])
	assert.Equal(t, float64(10), usage["cache_creation_input_tokens"])
}
// TestEncodeResponse_StopReasons verifies the wire value produced for
// each canonical stop reason; content_filter has no Anthropic equivalent
// and is mapped to end_turn.
func TestEncodeResponse_StopReasons(t *testing.T) {
	cases := []struct {
		name       string
		stopReason canonical.StopReason
		want       string
	}{
		{name: "end_turn", stopReason: canonical.StopReasonEndTurn, want: "end_turn"},
		{name: "max_tokens", stopReason: canonical.StopReasonMaxTokens, want: "max_tokens"},
		{name: "tool_use", stopReason: canonical.StopReasonToolUse, want: "tool_use"},
		{name: "stop_sequence", stopReason: canonical.StopReasonStopSequence, want: "stop_sequence"},
		{name: "refusal", stopReason: canonical.StopReasonRefusal, want: "refusal"},
		{name: "content_filter→end_turn", stopReason: canonical.StopReasonContentFilter, want: "end_turn"},
	}
	for _, tc := range cases {
		t.Run(tc.name, func(t *testing.T) {
			stop := tc.stopReason
			raw, err := encodeResponse(&canonical.CanonicalResponse{
				ID:         "r1",
				Model:      "claude-3",
				Content:    []canonical.ContentBlock{canonical.NewTextBlock("ok")},
				StopReason: &stop,
			})
			require.NoError(t, err)
			var got map[string]any
			require.NoError(t, json.Unmarshal(raw, &got))
			assert.Equal(t, tc.want, got["stop_reason"])
		})
	}
}
// TestEncodeSystem_SystemBlocks verifies that a slice of system blocks is
// encoded as a slice of maps with each block's text preserved.
func TestEncodeSystem_SystemBlocks(t *testing.T) {
	encoded := encodeSystem([]canonical.SystemBlock{{Text: "part1"}, {Text: "part2"}})
	blocks, ok := encoded.([]map[string]any)
	require.True(t, ok)
	require.Len(t, blocks, 2)
	assert.Equal(t, "part1", blocks[0]["text"])
}
func TestEncodeModelInfoResponse(t *testing.T) {
info := &canonical.CanonicalModelInfo{
ID: "claude-3-opus",
Name: "Claude 3 Opus",
Created: time.Date(2024, 1, 15, 0, 0, 0, 0, time.UTC).Unix(),
}
body, err := encodeModelInfoResponse(info)
require.NoError(t, err)
var result map[string]any
require.NoError(t, json.Unmarshal(body, &result))
assert.Equal(t, "claude-3-opus", result["id"])
assert.Equal(t, "Claude 3 Opus", result["display_name"])
}
// TestDecodeModelInfoResponse verifies that an Anthropic model-info body
// decodes to canonical fields, with created_at parsed to a non-zero time.
func TestDecodeModelInfoResponse(t *testing.T) {
body := []byte(`{"id":"claude-3-opus","type":"model","display_name":"Claude 3 Opus","created_at":"2024-01-15T00:00:00Z"}`)
info, err := decodeModelInfoResponse(body)
require.NoError(t, err)
assert.Equal(t, "claude-3-opus", info.ID)
assert.Equal(t, "Claude 3 Opus", info.Name)
assert.NotEqual(t, int64(0), info.Created)
}
// TestDecodeResponse_PauseTurn verifies that the pause_turn stop reason
// is passed through as-is rather than being remapped.
func TestDecodeResponse_PauseTurn(t *testing.T) {
body := []byte(`{
"id": "msg-1", "type": "message", "role": "assistant", "model": "claude-3",
"content": [{"type": "text", "text": "ok"}],
"stop_reason": "pause_turn",
"usage": {"input_tokens": 1, "output_tokens": 1}
}`)
resp, err := decodeResponse(body)
require.NoError(t, err)
assert.Equal(t, canonical.StopReason("pause_turn"), *resp.StopReason)
}
func TestEncodeResponse_NoStopReason(t *testing.T) {
resp := &canonical.CanonicalResponse{
ID: "msg-1",
Model: "claude-3",
Content: []canonical.ContentBlock{canonical.NewTextBlock("ok")},
}
body, err := encodeResponse(resp)
require.NoError(t, err)
var result map[string]any
require.NoError(t, json.Unmarshal(body, &result))
assert.Equal(t, "end_turn", result["stop_reason"])
}
// TestDecodeRequest_MaxTokensZero verifies that an explicit max_tokens of
// zero decodes to a nil MaxTokens (treated the same as absent).
func TestDecodeRequest_MaxTokensZero(t *testing.T) {
body := []byte(`{
"model": "claude-3",
"max_tokens": 0,
"messages": [{"role": "user", "content": "hi"}]
}`)
req, err := decodeRequest(body)
require.NoError(t, err)
assert.Nil(t, req.Parameters.MaxTokens)
}

View File

@@ -0,0 +1,114 @@
package canonical
import (
"encoding/json"
"testing"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
)
// TestGetSystemString covers every supported shape of the System field:
// plain string, nil, block slices (joined with blank lines), and an
// arbitrary non-string value (stringified).
func TestGetSystemString(t *testing.T) {
	cases := []struct {
		name   string
		system any
		want   string
	}{
		{name: "string", system: "hello", want: "hello"},
		{name: "nil", system: nil, want: ""},
		{name: "empty string", system: "", want: ""},
		{name: "system blocks", system: []SystemBlock{{Text: "part1"}, {Text: "part2"}}, want: "part1\n\npart2"},
		{name: "single block", system: []SystemBlock{{Text: "only"}}, want: "only"},
		{name: "other type", system: 123, want: "123"},
	}
	for _, tc := range cases {
		t.Run(tc.name, func(t *testing.T) {
			req := &CanonicalRequest{System: tc.system}
			assert.Equal(t, tc.want, req.GetSystemString())
		})
	}
}
// TestSetSystemString verifies that a non-empty string is stored as-is
// and an empty string clears the System field to nil.
func TestSetSystemString(t *testing.T) {
	var req CanonicalRequest
	req.SetSystemString("hello")
	assert.Equal(t, "hello", req.System)
	req.SetSystemString("")
	assert.Nil(t, req.System)
}
// TestNewTextBlock verifies the text-block constructor sets type and text.
func TestNewTextBlock(t *testing.T) {
	block := NewTextBlock("hello")
	assert.Equal(t, "text", block.Type)
	assert.Equal(t, "hello", block.Text)
}
// TestNewToolUseBlock verifies the tool-use constructor populates id,
// name, and the raw input unchanged.
func TestNewToolUseBlock(t *testing.T) {
	args := json.RawMessage(`{"key":"val"}`)
	block := NewToolUseBlock("id-1", "tool_name", args)
	assert.Equal(t, "tool_use", block.Type)
	assert.Equal(t, "id-1", block.ID)
	assert.Equal(t, "tool_name", block.Name)
	assert.Equal(t, args, block.Input)
}
// TestNewToolResultBlock verifies the tool-result constructor sets the
// linkage id and stores the error flag as a non-nil pointer.
func TestNewToolResultBlock(t *testing.T) {
	block := NewToolResultBlock("tool-1", "result", false)
	assert.Equal(t, "tool_result", block.Type)
	assert.Equal(t, "tool-1", block.ToolUseID)
	require.NotNil(t, block.IsError)
	assert.False(t, *block.IsError)
}
// TestNewThinkingBlock verifies the thinking-block constructor.
func TestNewThinkingBlock(t *testing.T) {
	block := NewThinkingBlock("thought")
	assert.Equal(t, "thinking", block.Type)
	assert.Equal(t, "thought", block.Thinking)
}
// TestNewToolChoice verifies all four tool-choice constructors against
// their expected struct values.
func TestNewToolChoice(t *testing.T) {
	assert.Equal(t, &ToolChoice{Type: "auto"}, NewToolChoiceAuto())
	assert.Equal(t, &ToolChoice{Type: "none"}, NewToolChoiceNone())
	assert.Equal(t, &ToolChoice{Type: "any"}, NewToolChoiceAny())
	assert.Equal(t, &ToolChoice{Type: "tool", Name: "fn"}, NewToolChoiceNamed("fn"))
}
// TestCanonicalRequest_RoundTrip verifies that a request survives a JSON
// marshal/unmarshal cycle with model, system and stream intact.
func TestCanonicalRequest_RoundTrip(t *testing.T) {
	original := &CanonicalRequest{
		Model:    "gpt-4",
		System:   "system prompt",
		Messages: []CanonicalMessage{{Role: RoleUser, Content: []ContentBlock{NewTextBlock("hi")}}},
		Stream:   true,
	}
	raw, err := json.Marshal(original)
	require.NoError(t, err)
	var roundTripped CanonicalRequest
	require.NoError(t, json.Unmarshal(raw, &roundTripped))
	assert.Equal(t, "gpt-4", roundTripped.Model)
	assert.Equal(t, "system prompt", roundTripped.System)
	assert.True(t, roundTripped.Stream)
}
// TestCanonicalResponse_RoundTrip verifies that a response survives a
// JSON marshal/unmarshal cycle with ID and stop reason intact.
func TestCanonicalResponse_RoundTrip(t *testing.T) {
	stop := StopReasonEndTurn
	original := &CanonicalResponse{
		ID:         "resp-1",
		Model:      "gpt-4",
		Content:    []ContentBlock{NewTextBlock("hello")},
		StopReason: &stop,
		Usage:      CanonicalUsage{InputTokens: 10, OutputTokens: 5},
	}
	raw, err := json.Marshal(original)
	require.NoError(t, err)
	var roundTripped CanonicalResponse
	require.NoError(t, json.Unmarshal(raw, &roundTripped))
	assert.Equal(t, "resp-1", roundTripped.ID)
	require.NotNil(t, roundTripped.StopReason)
	assert.Equal(t, StopReasonEndTurn, *roundTripped.StopReason)
}

View File

@@ -0,0 +1,323 @@
package conversion
import (
"encoding/json"
"errors"
"testing"
"nex/backend/internal/conversion/canonical"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
)
// TestConversionError_WithProviderProtocol verifies the builder sets the
// provider protocol.
func TestConversionError_WithProviderProtocol(t *testing.T) {
	convErr := NewConversionError(ErrorCodeInvalidInput, "test").WithProviderProtocol("anthropic")
	assert.Equal(t, "anthropic", convErr.ProviderProtocol)
}
// TestConversionError_WithInterfaceType verifies the builder sets the
// interface type.
func TestConversionError_WithInterfaceType(t *testing.T) {
	convErr := NewConversionError(ErrorCodeInvalidInput, "test").WithInterfaceType("CHAT")
	assert.Equal(t, "CHAT", convErr.InterfaceType)
}
// TestConversionError_FullBuilder chains every builder method and checks
// all fields land on the resulting error.
func TestConversionError_FullBuilder(t *testing.T) {
	convErr := NewConversionError(ErrorCodeInvalidInput, "bad").
		WithClientProtocol("openai").
		WithProviderProtocol("anthropic").
		WithInterfaceType("CHAT").
		WithDetail("field", "model").
		WithCause(errors.New("root"))
	assert.Equal(t, ErrorCodeInvalidInput, convErr.Code)
	assert.Equal(t, "openai", convErr.ClientProtocol)
	assert.Equal(t, "anthropic", convErr.ProviderProtocol)
	assert.Equal(t, "CHAT", convErr.InterfaceType)
	assert.Equal(t, "model", convErr.Details["field"])
	assert.Equal(t, "root", convErr.Cause.Error())
}
func TestEngine_Use(t *testing.T) {
registry := NewMemoryRegistry()
engine := NewConversionEngine(registry)
called := false
engine.Use(&testMiddleware{fn: func(req *canonical.CanonicalRequest, cp, pp string, ctx *ConversionContext) (*canonical.CanonicalRequest, error) {
called = true
return req, nil
}})
clientAdapter := newMockAdapter("client", false)
clientAdapter.decodeReqFn = func(raw []byte) (*canonical.CanonicalRequest, error) {
return &canonical.CanonicalRequest{Model: "test"}, nil
}
providerAdapter := newMockAdapter("provider", false)
providerAdapter.encodeReqFn = func(req *canonical.CanonicalRequest, p *TargetProvider) ([]byte, error) {
return json.Marshal(req)
}
_ = engine.RegisterAdapter(clientAdapter)
_ = engine.RegisterAdapter(providerAdapter)
_, err := engine.ConvertHttpRequest(HTTPRequestSpec{
URL: "/v1/chat/completions", Method: "POST", Body: []byte(`{}`),
}, "client", "provider", NewTargetProvider("https://example.com", "key", "model"))
require.NoError(t, err)
assert.True(t, called)
}
func TestConvertHttpRequest_DecodeError(t *testing.T) {
registry := NewMemoryRegistry()
engine := NewConversionEngine(registry)
clientAdapter := newMockAdapter("client", false)
clientAdapter.decodeReqFn = func(raw []byte) (*canonical.CanonicalRequest, error) {
return nil, errors.New("decode failed")
}
_ = engine.RegisterAdapter(clientAdapter)
_ = engine.RegisterAdapter(newMockAdapter("provider", false))
_, err := engine.ConvertHttpRequest(HTTPRequestSpec{
URL: "/v1/chat/completions", Method: "POST", Body: []byte(`{}`),
}, "client", "provider", NewTargetProvider("", "", ""))
assert.Error(t, err)
}
func TestConvertHttpRequest_EncodeError(t *testing.T) {
registry := NewMemoryRegistry()
engine := NewConversionEngine(registry)
_ = engine.RegisterAdapter(newMockAdapter("client", false))
providerAdapter := newMockAdapter("provider", false)
providerAdapter.encodeReqFn = func(req *canonical.CanonicalRequest, p *TargetProvider) ([]byte, error) {
return nil, errors.New("encode failed")
}
_ = engine.RegisterAdapter(providerAdapter)
_, err := engine.ConvertHttpRequest(HTTPRequestSpec{
URL: "/v1/chat/completions", Method: "POST", Body: []byte(`{}`),
}, "client", "provider", NewTargetProvider("", "", ""))
assert.Error(t, err)
}
func TestConvertHttpResponse_CrossProtocol(t *testing.T) {
registry := NewMemoryRegistry()
engine := NewConversionEngine(registry)
clientAdapter := newMockAdapter("client", false)
clientAdapter.encodeRespFn = func(resp *canonical.CanonicalResponse) ([]byte, error) {
return json.Marshal(map[string]string{"id": resp.ID})
}
providerAdapter := newMockAdapter("provider", false)
providerAdapter.decodeRespFn = func(raw []byte) (*canonical.CanonicalResponse, error) {
return &canonical.CanonicalResponse{ID: "resp-1", Model: "test"}, nil
}
_ = engine.RegisterAdapter(clientAdapter)
_ = engine.RegisterAdapter(providerAdapter)
result, err := engine.ConvertHttpResponse(HTTPResponseSpec{
StatusCode: 200, Body: []byte(`{"id":"resp-1"}`),
}, "client", "provider", InterfaceTypeChat)
require.NoError(t, err)
assert.Equal(t, 200, result.StatusCode)
assert.Contains(t, string(result.Body), "resp-1")
}
func TestConvertHttpResponse_DecodeError(t *testing.T) {
registry := NewMemoryRegistry()
engine := NewConversionEngine(registry)
providerAdapter := newMockAdapter("provider", false)
providerAdapter.decodeRespFn = func(raw []byte) (*canonical.CanonicalResponse, error) {
return nil, errors.New("decode error")
}
_ = engine.RegisterAdapter(providerAdapter)
_ = engine.RegisterAdapter(newMockAdapter("client", false))
_, err := engine.ConvertHttpResponse(HTTPResponseSpec{Body: []byte(`{}`)}, "client", "provider", InterfaceTypeChat)
assert.Error(t, err)
}
func TestConvertHttpRequest_EmbeddingInterface(t *testing.T) {
registry := NewMemoryRegistry()
engine := NewConversionEngine(registry)
clientAdapter := newMockAdapter("client", false)
clientAdapter.ifaceType = InterfaceTypeEmbeddings
clientAdapter.supportsIface = map[InterfaceType]bool{InterfaceTypeEmbeddings: true}
clientAdapter.decodeReqFn = func(raw []byte) (*canonical.CanonicalRequest, error) {
return &canonical.CanonicalRequest{Model: "test"}, nil
}
providerAdapter := newMockAdapter("provider", false)
providerAdapter.ifaceType = InterfaceTypeEmbeddings
providerAdapter.supportsIface = map[InterfaceType]bool{InterfaceTypeEmbeddings: true}
_ = engine.RegisterAdapter(clientAdapter)
_ = engine.RegisterAdapter(providerAdapter)
result, err := engine.ConvertHttpRequest(HTTPRequestSpec{
URL: "/v1/embeddings", Method: "POST", Body: []byte(`{"model":"text-embedding","input":"hello"}`),
}, "client", "provider", NewTargetProvider("https://example.com", "key", "model"))
require.NoError(t, err)
assert.NotNil(t, result)
}
func TestConvertHttpRequest_RerankInterface(t *testing.T) {
registry := NewMemoryRegistry()
engine := NewConversionEngine(registry)
clientAdapter := newMockAdapter("client", false)
clientAdapter.ifaceType = InterfaceTypeRerank
clientAdapter.supportsIface = map[InterfaceType]bool{InterfaceTypeRerank: true}
providerAdapter := newMockAdapter("provider", false)
providerAdapter.ifaceType = InterfaceTypeRerank
providerAdapter.supportsIface = map[InterfaceType]bool{InterfaceTypeRerank: true}
_ = engine.RegisterAdapter(clientAdapter)
_ = engine.RegisterAdapter(providerAdapter)
result, err := engine.ConvertHttpRequest(HTTPRequestSpec{
URL: "/v1/rerank", Method: "POST", Body: []byte(`{"model":"rerank","query":"test","documents":["a"]}`),
}, "client", "provider", NewTargetProvider("https://example.com", "key", "model"))
require.NoError(t, err)
assert.NotNil(t, result)
}
func TestConvertHttpResponse_EmbeddingInterface(t *testing.T) {
registry := NewMemoryRegistry()
engine := NewConversionEngine(registry)
clientAdapter := newMockAdapter("client", false)
clientAdapter.supportsIface = map[InterfaceType]bool{InterfaceTypeEmbeddings: true}
providerAdapter := newMockAdapter("provider", false)
providerAdapter.supportsIface = map[InterfaceType]bool{InterfaceTypeEmbeddings: true}
_ = engine.RegisterAdapter(clientAdapter)
_ = engine.RegisterAdapter(providerAdapter)
result, err := engine.ConvertHttpResponse(HTTPResponseSpec{
StatusCode: 200, Body: []byte(`{"object":"list","data":[],"model":"test"}`),
}, "client", "provider", InterfaceTypeEmbeddings)
require.NoError(t, err)
assert.NotNil(t, result)
}
func TestConvertHttpResponse_RerankInterface(t *testing.T) {
registry := NewMemoryRegistry()
engine := NewConversionEngine(registry)
clientAdapter := newMockAdapter("client", false)
clientAdapter.supportsIface = map[InterfaceType]bool{InterfaceTypeRerank: true}
providerAdapter := newMockAdapter("provider", false)
providerAdapter.supportsIface = map[InterfaceType]bool{InterfaceTypeRerank: true}
_ = engine.RegisterAdapter(clientAdapter)
_ = engine.RegisterAdapter(providerAdapter)
result, err := engine.ConvertHttpResponse(HTTPResponseSpec{
StatusCode: 200, Body: []byte(`{"results":[],"model":"test"}`),
}, "client", "provider", InterfaceTypeRerank)
require.NoError(t, err)
assert.NotNil(t, result)
}
func TestConvertHttpRequest_ModelsInterface_Passthrough(t *testing.T) {
registry := NewMemoryRegistry()
engine := NewConversionEngine(registry)
clientAdapter := newMockAdapter("client", false)
clientAdapter.ifaceType = InterfaceTypeModels
providerAdapter := newMockAdapter("provider", false)
providerAdapter.ifaceType = InterfaceTypeModels
_ = engine.RegisterAdapter(clientAdapter)
_ = engine.RegisterAdapter(providerAdapter)
body := []byte(`{"object":"list","data":[]}`)
result, err := engine.ConvertHttpRequest(HTTPRequestSpec{
URL: "/v1/models", Method: "GET", Body: body,
}, "client", "provider", NewTargetProvider("https://example.com", "key", ""))
require.NoError(t, err)
assert.Equal(t, body, result.Body)
}
// TestConvertHttpResponse_ModelsInterface verifies a model-list response is
// converted between protocols without error.
func TestConvertHttpResponse_ModelsInterface(t *testing.T) {
	reg := NewMemoryRegistry()
	eng := NewConversionEngine(reg)

	src := newMockAdapter("client", false)
	src.supportsIface = map[InterfaceType]bool{InterfaceTypeModels: true}
	dst := newMockAdapter("provider", false)
	dst.supportsIface = map[InterfaceType]bool{InterfaceTypeModels: true}
	_ = eng.RegisterAdapter(src)
	_ = eng.RegisterAdapter(dst)

	spec := HTTPResponseSpec{
		StatusCode: 200, Body: []byte(`{"object":"list","data":[]}`),
	}
	out, err := eng.ConvertHttpResponse(spec, "client", "provider", InterfaceTypeModels)
	require.NoError(t, err)
	assert.NotNil(t, out)
}
// TestConvertHttpResponse_ModelInfoInterface verifies a single-model info
// response is converted between protocols without error.
func TestConvertHttpResponse_ModelInfoInterface(t *testing.T) {
	reg := NewMemoryRegistry()
	eng := NewConversionEngine(reg)

	src := newMockAdapter("client", false)
	src.supportsIface = map[InterfaceType]bool{InterfaceTypeModelInfo: true}
	dst := newMockAdapter("provider", false)
	dst.supportsIface = map[InterfaceType]bool{InterfaceTypeModelInfo: true}
	_ = eng.RegisterAdapter(src)
	_ = eng.RegisterAdapter(dst)

	spec := HTTPResponseSpec{
		StatusCode: 200, Body: []byte(`{"id":"gpt-4","object":"model"}`),
	}
	out, err := eng.ConvertHttpResponse(spec, "client", "provider", InterfaceTypeModelInfo)
	require.NoError(t, err)
	assert.NotNil(t, out)
}
// TestRegistry_ListProtocols verifies every registered adapter appears in the
// protocol listing.
func TestRegistry_ListProtocols(t *testing.T) {
	reg := NewMemoryRegistry()
	_ = reg.Register(newMockAdapter("openai", true))
	_ = reg.Register(newMockAdapter("anthropic", true))

	got := reg.ListProtocols()
	assert.Len(t, got, 2)
	assert.Contains(t, got, "openai")
	assert.Contains(t, got, "anthropic")
}
// TestRegistry_ConcurrentAccess hammers the registry from two goroutines (one
// writer, one reader) so the race detector (-race) can catch unguarded state.
// NOTE(review): string(rune(i)) for i in 0..99 yields control characters as
// protocol-name suffixes; distinct, so fine for a race test, though
// strconv.Itoa would be more readable.
func TestRegistry_ConcurrentAccess(t *testing.T) {
	reg := NewMemoryRegistry()
	done := make(chan bool, 2)

	register := func() {
		for i := 0; i < 100; i++ {
			_ = reg.Register(newMockAdapter("proto-"+string(rune(i)), true))
		}
		done <- true
	}
	lookup := func() {
		for i := 0; i < 100; i++ {
			_, _ = reg.Get("proto-" + string(rune(i)))
		}
		_ = reg.ListProtocols()
		done <- true
	}

	go register()
	go lookup()
	<-done
	<-done
}
// TestNewConversionContext verifies the constructor populates the conversion
// ID, interface type, and metadata map.
func TestNewConversionContext(t *testing.T) {
	got := NewConversionContext(InterfaceTypeChat)
	assert.NotEmpty(t, got.ConversionID)
	assert.Equal(t, InterfaceTypeChat, got.InterfaceType)
	assert.NotNil(t, got.Metadata)
}
// testMiddleware is a configurable middleware stub for engine tests:
// Intercept delegates to the injected fn when set and is otherwise a no-op
// passthrough; stream events are always passed through unchanged.
type testMiddleware struct {
	// fn, when non-nil, replaces the default passthrough behavior of Intercept.
	fn func(req *canonical.CanonicalRequest, clientProtocol, providerProtocol string, ctx *ConversionContext) (*canonical.CanonicalRequest, error)
}

// Intercept invokes the injected fn if present; otherwise it returns the
// request unchanged with no error.
func (m *testMiddleware) Intercept(req *canonical.CanonicalRequest, clientProtocol, providerProtocol string, ctx *ConversionContext) (*canonical.CanonicalRequest, error) {
	if m.fn != nil {
		return m.fn(req, clientProtocol, providerProtocol, ctx)
	}
	return req, nil
}

// InterceptStreamEvent is a pure passthrough; this stub never alters or
// rejects stream events.
func (m *testMiddleware) InterceptStreamEvent(event *canonical.CanonicalStreamEvent, clientProtocol, providerProtocol string, ctx *ConversionContext) (*canonical.CanonicalStreamEvent, error) {
	return event, nil
}
// Keep the encoding/json import referenced even when no test in this file
// marshals directly.
var _ = json.Marshal

View File

@@ -353,3 +353,120 @@ func TestStreamDecoder_MultipleChunks_Text(t *testing.T) {
}
assert.Equal(t, []string{"你好", "世界"}, deltas)
}
// TestStreamDecoder_UTF8Truncation verifies that an SSE event split across
// two ProcessChunk calls is buffered and eventually decoded intact, including
// the multi-byte character "你".
//
// The original version of this test never asserted on the second chunk's
// output (events1 is typically empty because the event framing is incomplete,
// and events2 was discarded), so it could not fail; it now collects text
// deltas across both parts and requires the content to arrive exactly once.
func TestStreamDecoder_UTF8Truncation(t *testing.T) {
	d := NewStreamDecoder()
	chunk := map[string]any{
		"id":    "chatcmpl-utf8",
		"model": "gpt-4",
		"choices": []any{
			map[string]any{
				"index": 0,
				"delta": map[string]any{"content": "你"},
			},
		},
	}
	data, _ := json.Marshal(chunk)
	sseData := []byte("data: " + string(data) + "\n\n")
	// Split inside the trailing event framing so the first part is an
	// incomplete SSE event that the decoder must buffer.
	mid := len(sseData) - 5
	var deltas []string
	for _, part := range [][]byte{sseData[:mid], sseData[mid:]} {
		for _, e := range d.ProcessChunk(part) {
			if e.Type == canonical.EventContentBlockDelta && e.Delta != nil {
				deltas = append(deltas, e.Delta.Text)
			}
		}
	}
	require.Equal(t, []string{"你"}, deltas, "split SSE event must decode exactly once, undamaged")
}
// TestStreamDecoder_ToolCallSubsequentDelta verifies the two-phase OpenAI
// tool-call streaming shape: the first chunk announces the call (id, name,
// empty arguments) and a follow-up chunk carries only the arguments, which
// the decoder must surface as an input_json_delta canonical event.
func TestStreamDecoder_ToolCallSubsequentDelta(t *testing.T) {
	d := NewStreamDecoder()
	// Pointer mirrors OpenAI's optional tool_calls[].index field; *int
	// marshals as a plain JSON number.
	idx := 0
	// First chunk: tool call opened with name but no argument bytes yet.
	chunk1 := map[string]any{
		"id":    "chatcmpl-tc",
		"model": "gpt-4",
		"choices": []any{
			map[string]any{
				"index": 0,
				"delta": map[string]any{
					"tool_calls": []any{
						map[string]any{
							"index": &idx,
							"id":    "call_1",
							"type":  "function",
							"function": map[string]any{
								"name":      "get_weather",
								"arguments": "",
							},
						},
					},
				},
			},
		},
	}
	// Second chunk: same tool-call index, arguments only (no id/name).
	chunk2 := map[string]any{
		"id":    "chatcmpl-tc",
		"model": "gpt-4",
		"choices": []any{
			map[string]any{
				"index": 0,
				"delta": map[string]any{
					"tool_calls": []any{
						map[string]any{
							"index": &idx,
							"function": map[string]any{
								"arguments": "{\"city\":\"Beijing\"}",
							},
						},
					},
				},
			},
		},
	}
	events1 := d.ProcessChunk(makeChunkSSE(chunk1))
	require.NotEmpty(t, events1)
	events2 := d.ProcessChunk(makeChunkSSE(chunk2))
	require.NotEmpty(t, events2)
	// The follow-up chunk must be decoded as an input_json_delta carrying the
	// complete argument fragment.
	foundInputJSON := false
	for _, e := range events2 {
		if e.Type == canonical.EventContentBlockDelta && e.Delta != nil && e.Delta.Type == "input_json_delta" {
			foundInputJSON = true
			assert.Equal(t, "{\"city\":\"Beijing\"}", e.Delta.PartialJSON)
		}
	}
	assert.True(t, foundInputJSON, "subsequent tool call delta should emit input_json_delta")
}
// TestStreamDecoder_InvalidJSON verifies that a malformed SSE payload is
// dropped silently, producing no events.
func TestStreamDecoder_InvalidJSON(t *testing.T) {
	dec := NewStreamDecoder()
	got := dec.ProcessChunk([]byte("data: {invalid json}\n\n"))
	assert.Nil(t, got)
}
// TestStreamDecoder_NonDataLines verifies SSE comment lines (": ...") are
// skipped while the following data line is still decoded.
func TestStreamDecoder_NonDataLines(t *testing.T) {
	dec := NewStreamDecoder()
	input := []byte(": comment line\ndata: {\"id\":\"1\",\"choices\":[{\"delta\":{\"content\":\"hi\"}}]}\n\n")
	events := dec.ProcessChunk(input)
	require.NotEmpty(t, events)

	seen := false
	for _, e := range events {
		if e.Type != canonical.EventContentBlockDelta || e.Delta == nil {
			continue
		}
		seen = true
		assert.Equal(t, "hi", e.Delta.Text)
	}
	assert.True(t, seen)
}

View File

@@ -137,15 +137,10 @@ func (e *StreamEncoder) encodeInputJSONDelta(event canonical.CanonicalStreamEven
}
// 后续 delta仅含 arguments
// 通过 index 查找 tool call
// 使用 canonical 事件中的 index 直接映射到 OpenAI tool_calls index
tcIdx := 0
if event.Index != nil {
for id, idx := range e.toolCallIndexMap {
if idx == tcIdx {
_ = id
break
}
}
tcIdx = *event.Index
}
delta := map[string]any{
"tool_calls": []map[string]any{{

View File

@@ -170,3 +170,116 @@ func TestStreamEncoder_MessageDelta_WithUsage(t *testing.T) {
assert.Contains(t, s, "usage")
assert.Contains(t, s, "prompt_tokens")
}
// TestStreamEncoder_InputJSONDelta_SubsequentDelta verifies that after a
// tool_use block is opened and a first argument fragment encoded, a second
// input_json_delta for the same block still produces a tool_calls chunk
// carrying the new fragment (the encoder must keep per-block state).
func TestStreamEncoder_InputJSONDelta_SubsequentDelta(t *testing.T) {
	e := NewStreamEncoder()
	// Open the tool_use block and feed the first argument fragment; return
	// values are intentionally ignored — only the final delta is asserted.
	e.EncodeEvent(canonical.NewContentBlockStartEvent(0, canonical.StreamContentBlock{
		Type: "tool_use",
		ID:   "call_1",
		Name: "get_weather",
	}))
	e.EncodeEvent(canonical.NewContentBlockDeltaEvent(0, canonical.StreamDelta{
		Type:        string(canonical.DeltaTypeInputJSON),
		PartialJSON: "{\"city\":",
	}))
	// Second fragment for the same block index.
	event := canonical.NewContentBlockDeltaEvent(0, canonical.StreamDelta{
		Type:        string(canonical.DeltaTypeInputJSON),
		PartialJSON: "\"Beijing\"}",
	})
	chunks := e.EncodeEvent(event)
	require.NotEmpty(t, chunks)
	s := string(chunks[0])
	assert.Contains(t, s, "tool_calls")
	assert.Contains(t, s, "Beijing")
}
// TestStreamEncoder_MessageStart_NilMessage verifies a message_start event
// without a message payload still yields a single completion chunk.
func TestStreamEncoder_MessageStart_NilMessage(t *testing.T) {
	enc := NewStreamEncoder()
	out := enc.EncodeEvent(canonical.CanonicalStreamEvent{Type: canonical.EventMessageStart})
	require.Len(t, out, 1)
	assert.Contains(t, string(out[0]), "chat.completion.chunk")
}
// TestStreamEncoder_UnknownEvent_ReturnsNil verifies unrecognized event types
// are ignored rather than encoded.
func TestStreamEncoder_UnknownEvent_ReturnsNil(t *testing.T) {
	enc := NewStreamEncoder()
	out := enc.EncodeEvent(canonical.CanonicalStreamEvent{Type: "unknown_type"})
	assert.Nil(t, out)
}
// TestStreamEncoder_ContentBlockDelta_NilDelta verifies a delta event with a
// nil Delta payload produces no output.
func TestStreamEncoder_ContentBlockDelta_NilDelta(t *testing.T) {
	enc := NewStreamEncoder()
	out := enc.EncodeEvent(canonical.CanonicalStreamEvent{Type: canonical.EventContentBlockDelta})
	assert.Nil(t, out)
}
// TestStreamEncoder_MultiToolCall_IndexMapping verifies that two interleaved
// tool_use blocks keep independent OpenAI tool_calls indices: the first delta
// for each block carries its id/name, subsequent deltas for the same block
// carry only arguments at the correct index.
func TestStreamEncoder_MultiToolCall_IndexMapping(t *testing.T) {
	e := NewStreamEncoder()
	// Tool call #0: first delta must include the function name and index 0.
	e.EncodeEvent(canonical.NewContentBlockStartEvent(0, canonical.StreamContentBlock{
		Type: "tool_use",
		ID:   "call_1",
		Name: "get_weather",
	}))
	firstDelta := canonical.NewContentBlockDeltaEvent(0, canonical.StreamDelta{
		Type:        string(canonical.DeltaTypeInputJSON),
		PartialJSON: `{"city":"北京"}`,
	})
	chunks := e.EncodeEvent(firstDelta)
	require.NotEmpty(t, chunks)
	s := string(chunks[0])
	assert.Contains(t, s, `"index":0`)
	assert.Contains(t, s, "get_weather")
	assert.Contains(t, s, "北京")
	// Tool call #1 opens after #0 stops; its first delta uses index 1.
	e.EncodeEvent(canonical.NewContentBlockStopEvent(0))
	e.EncodeEvent(canonical.NewContentBlockStartEvent(1, canonical.StreamContentBlock{
		Type: "tool_use",
		ID:   "call_2",
		Name: "get_time",
	}))
	secondDelta := canonical.NewContentBlockDeltaEvent(1, canonical.StreamDelta{
		Type:        string(canonical.DeltaTypeInputJSON),
		PartialJSON: `{"tz":"Asia/Shanghai"}`,
	})
	chunks = e.EncodeEvent(secondDelta)
	require.NotEmpty(t, chunks)
	s = string(chunks[0])
	assert.Contains(t, s, `"index":1`)
	assert.Contains(t, s, "get_time")
	assert.Contains(t, s, "Asia/Shanghai")
	// Follow-up delta for block 0 must map back to index 0 and omit the
	// already-announced function name.
	subsequentDelta0 := canonical.NewContentBlockDeltaEvent(0, canonical.StreamDelta{
		Type:        string(canonical.DeltaTypeInputJSON),
		PartialJSON: `"more_data"`,
	})
	chunks = e.EncodeEvent(subsequentDelta0)
	require.NotEmpty(t, chunks)
	s = string(chunks[0])
	assert.Contains(t, s, `"index":0`)
	assert.NotContains(t, s, "get_weather")
	assert.Contains(t, s, "more_data")
	// Follow-up delta for block 1 must map back to index 1.
	subsequentDelta1 := canonical.NewContentBlockDeltaEvent(1, canonical.StreamDelta{
		Type:        string(canonical.DeltaTypeInputJSON),
		PartialJSON: `"more_time"`,
	})
	chunks = e.EncodeEvent(subsequentDelta1)
	require.NotEmpty(t, chunks)
	s = string(chunks[0])
	assert.Contains(t, s, `"index":1`)
	assert.Contains(t, s, "more_time")
}

View File

@@ -0,0 +1,434 @@
package openai
import (
"encoding/json"
"testing"
"nex/backend/internal/conversion"
"nex/backend/internal/conversion/canonical"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
)
// TestDecodeEmbeddingRequest verifies decoding of a complete embedding
// request including the optional dimensions field.
func TestDecodeEmbeddingRequest(t *testing.T) {
	body := []byte(`{"model":"text-embedding-3-small","input":"hello world","encoding_format":"float","dimensions":256}`)
	got, err := decodeEmbeddingRequest(body)
	require.NoError(t, err)

	assert.Equal(t, "text-embedding-3-small", got.Model)
	assert.Equal(t, "hello world", got.Input)
	assert.Equal(t, "float", got.EncodingFormat)
	require.NotNil(t, got.Dimensions)
	assert.Equal(t, 256, *got.Dimensions)
}
// TestDecodeEmbeddingRequest_ArrayInput verifies that an array-form input is
// preserved as a []any of the original length.
func TestDecodeEmbeddingRequest_ArrayInput(t *testing.T) {
	got, err := decodeEmbeddingRequest([]byte(`{"model":"text-embedding","input":["hello","world"]}`))
	require.NoError(t, err)
	assert.Equal(t, "text-embedding", got.Model)

	arr, ok := got.Input.([]any)
	require.True(t, ok)
	assert.Len(t, arr, 2)
}
// TestDecodeEmbeddingRequest_InvalidJSON verifies malformed bodies are rejected.
func TestDecodeEmbeddingRequest_InvalidJSON(t *testing.T) {
	if _, err := decodeEmbeddingRequest([]byte(`invalid`)); err == nil {
		t.Fatal("expected error for invalid JSON")
	}
}
// TestDecodeEmbeddingResponse verifies decoding of an OpenAI embedding
// response body including usage.
func TestDecodeEmbeddingResponse(t *testing.T) {
	body := []byte(`{
		"object": "list",
		"data": [{"index": 0, "embedding": [0.1, 0.2, 0.3]}],
		"model": "text-embedding-3-small",
		"usage": {"prompt_tokens": 5, "total_tokens": 5}
	}`)
	got, err := decodeEmbeddingResponse(body)
	require.NoError(t, err)

	assert.Equal(t, "text-embedding-3-small", got.Model)
	require.Len(t, got.Data, 1)
	assert.Equal(t, 0, got.Data[0].Index)
	assert.Equal(t, 5, got.Usage.PromptTokens)
}
// TestDecodeRerankRequest verifies decoding of a rerank request including the
// optional top_n and return_documents fields.
func TestDecodeRerankRequest(t *testing.T) {
	body := []byte(`{"model":"rerank-1","query":"what is AI","documents":["doc1","doc2"],"top_n":3,"return_documents":true}`)
	got, err := decodeRerankRequest(body)
	require.NoError(t, err)

	assert.Equal(t, "rerank-1", got.Model)
	assert.Equal(t, "what is AI", got.Query)
	assert.Equal(t, []string{"doc1", "doc2"}, got.Documents)
	require.NotNil(t, got.TopN)
	assert.Equal(t, 3, *got.TopN)
	require.NotNil(t, got.ReturnDocuments)
	assert.True(t, *got.ReturnDocuments)
}
// TestDecodeRerankResponse verifies decoding of a rerank response, including
// the optional inline document.
func TestDecodeRerankResponse(t *testing.T) {
	body := []byte(`{
		"results": [{"index": 0, "relevance_score": 0.95, "document": "relevant doc"}],
		"model": "rerank-1"
	}`)
	got, err := decodeRerankResponse(body)
	require.NoError(t, err)

	assert.Equal(t, "rerank-1", got.Model)
	require.Len(t, got.Results, 1)
	first := got.Results[0]
	assert.Equal(t, 0, first.Index)
	assert.InDelta(t, 0.95, first.RelevanceScore, 0.001)
	require.NotNil(t, first.Document)
	assert.Equal(t, "relevant doc", *first.Document)
}
// TestDecodeModelInfoResponse verifies decoding of a single model info object.
func TestDecodeModelInfoResponse(t *testing.T) {
	got, err := decodeModelInfoResponse([]byte(`{"id":"gpt-4","object":"model","created":1700000000,"owned_by":"openai"}`))
	require.NoError(t, err)
	assert.Equal(t, "gpt-4", got.ID)
	assert.Equal(t, int64(1700000000), got.Created)
	assert.Equal(t, "openai", got.OwnedBy)
}
// TestEncodeEmbeddingRequest verifies the target provider's model override is
// applied and scalar fields round-trip.
func TestEncodeEmbeddingRequest(t *testing.T) {
	in := &canonical.CanonicalEmbeddingRequest{
		Model:          "text-embedding-3-small",
		Input:          "hello",
		EncodingFormat: "float",
	}
	target := conversion.NewTargetProvider("", "key", "my-embedding-model")
	body, err := encodeEmbeddingRequest(in, target)
	require.NoError(t, err)

	var got map[string]any
	require.NoError(t, json.Unmarshal(body, &got))
	assert.Equal(t, "my-embedding-model", got["model"])
	assert.Equal(t, "hello", got["input"])
	assert.Equal(t, "float", got["encoding_format"])
}
// TestEncodeEmbeddingRequest_WithDimensions verifies the optional dimensions
// pointer is emitted as a JSON number.
func TestEncodeEmbeddingRequest_WithDimensions(t *testing.T) {
	dims := 256
	in := &canonical.CanonicalEmbeddingRequest{
		Model:      "text-embedding",
		Input:      "test",
		Dimensions: &dims,
	}
	body, err := encodeEmbeddingRequest(in, conversion.NewTargetProvider("", "key", "model"))
	require.NoError(t, err)

	var got map[string]any
	require.NoError(t, json.Unmarshal(body, &got))
	assert.Equal(t, float64(256), got["dimensions"])
}
// TestEncodeEmbeddingResponse verifies the canonical response is serialized
// with OpenAI's "list" object marker and the original model name.
func TestEncodeEmbeddingResponse(t *testing.T) {
	in := &canonical.CanonicalEmbeddingResponse{
		Data:  []canonical.EmbeddingData{{Index: 0, Embedding: []float64{0.1, 0.2}}},
		Model: "text-embedding",
		Usage: canonical.EmbeddingUsage{PromptTokens: 3, TotalTokens: 3},
	}
	body, err := encodeEmbeddingResponse(in)
	require.NoError(t, err)

	var got map[string]any
	require.NoError(t, json.Unmarshal(body, &got))
	assert.Equal(t, "list", got["object"])
	assert.Equal(t, "text-embedding", got["model"])
}
// TestEncodeRerankRequest verifies the target provider's model override is
// applied and the query round-trips.
func TestEncodeRerankRequest(t *testing.T) {
	topN := 5
	in := &canonical.CanonicalRerankRequest{
		Model:     "rerank-1",
		Query:     "what is AI",
		Documents: []string{"doc1", "doc2"},
		TopN:      &topN,
	}
	body, err := encodeRerankRequest(in, conversion.NewTargetProvider("", "key", "my-rerank-model"))
	require.NoError(t, err)

	var got map[string]any
	require.NoError(t, json.Unmarshal(body, &got))
	assert.Equal(t, "my-rerank-model", got["model"])
	assert.Equal(t, "what is AI", got["query"])
}
// TestEncodeRerankResponse verifies the canonical rerank response serializes
// with the model name and a one-element results array.
func TestEncodeRerankResponse(t *testing.T) {
	doc := "relevant passage"
	resp := &canonical.CanonicalRerankResponse{
		Results: []canonical.RerankResult{
			{Index: 0, RelevanceScore: 0.95, Document: &doc},
		},
		Model: "rerank-1",
	}
	body, err := encodeRerankResponse(resp)
	require.NoError(t, err)
	var result map[string]any
	require.NoError(t, json.Unmarshal(body, &result))
	assert.Equal(t, "rerank-1", result["model"])
	// Checked type assertion: the original `result["results"].([]any)` would
	// panic on a shape regression instead of failing the test cleanly.
	results, ok := result["results"].([]any)
	require.True(t, ok, "results should serialize as a JSON array")
	assert.Len(t, results, 1)
}
// TestEncodeModelInfoResponse verifies a canonical model info serializes with
// its ID and OpenAI's "model" object marker.
func TestEncodeModelInfoResponse(t *testing.T) {
	in := &canonical.CanonicalModelInfo{
		ID:      "gpt-4",
		Name:    "GPT-4",
		Created: 1700000000,
		OwnedBy: "openai",
	}
	body, err := encodeModelInfoResponse(in)
	require.NoError(t, err)

	var got map[string]any
	require.NoError(t, json.Unmarshal(body, &got))
	assert.Equal(t, "gpt-4", got["id"])
	assert.Equal(t, "model", got["object"])
}
// TestDecodeEmbeddingResponse_InvalidJSON verifies malformed bodies are rejected.
func TestDecodeEmbeddingResponse_InvalidJSON(t *testing.T) {
	if _, err := decodeEmbeddingResponse([]byte(`invalid`)); err == nil {
		t.Fatal("expected error for invalid JSON")
	}
}
// TestDecodeRerankRequest_InvalidJSON verifies malformed bodies are rejected.
func TestDecodeRerankRequest_InvalidJSON(t *testing.T) {
	if _, err := decodeRerankRequest([]byte(`invalid`)); err == nil {
		t.Fatal("expected error for invalid JSON")
	}
}
// TestDecodeRerankResponse_InvalidJSON verifies malformed bodies are rejected.
func TestDecodeRerankResponse_InvalidJSON(t *testing.T) {
	if _, err := decodeRerankResponse([]byte(`invalid`)); err == nil {
		t.Fatal("expected error for invalid JSON")
	}
}
// TestDecodeModelInfoResponse_InvalidJSON verifies malformed bodies are rejected.
func TestDecodeModelInfoResponse_InvalidJSON(t *testing.T) {
	if _, err := decodeModelInfoResponse([]byte(`invalid`)); err == nil {
		t.Fatal("expected error for invalid JSON")
	}
}
// TestDecodeRequest_ThinkingNone verifies reasoning_effort "none" maps to a
// disabled canonical thinking config.
func TestDecodeRequest_ThinkingNone(t *testing.T) {
	got, err := decodeRequest([]byte(`{"model":"gpt-4","messages":[{"role":"user","content":"hi"}],"reasoning_effort":"none"}`))
	require.NoError(t, err)
	require.NotNil(t, got.Thinking)
	assert.Equal(t, "disabled", got.Thinking.Type)
}
// TestDecodeRequest_ThinkingMinimal verifies reasoning_effort "minimal" maps
// to enabled thinking with "low" effort.
func TestDecodeRequest_ThinkingMinimal(t *testing.T) {
	got, err := decodeRequest([]byte(`{"model":"gpt-4","messages":[{"role":"user","content":"hi"}],"reasoning_effort":"minimal"}`))
	require.NoError(t, err)
	require.NotNil(t, got.Thinking)
	assert.Equal(t, "enabled", got.Thinking.Type)
	assert.Equal(t, "low", got.Thinking.Effort)
}
// TestDecodeRequest_OutputFormat_Text verifies the default "text" response
// format is dropped (no canonical output format).
func TestDecodeRequest_OutputFormat_Text(t *testing.T) {
	got, err := decodeRequest([]byte(`{"model":"gpt-4","messages":[{"role":"user","content":"hi"}],"response_format":{"type":"text"}}`))
	require.NoError(t, err)
	assert.Nil(t, got.OutputFormat)
}
// TestDecodeRequest_DeprecatedFunctionCall verifies the legacy function_call /
// functions fields are translated into tool_choice / tools.
func TestDecodeRequest_DeprecatedFunctionCall(t *testing.T) {
	got, err := decodeRequest([]byte(`{"model":"gpt-4","messages":[{"role":"user","content":"hi"}],"function_call":"auto","functions":[{"name":"fn1","parameters":{}}]}`))
	require.NoError(t, err)
	assert.Equal(t, "auto", got.ToolChoice.Type)
	assert.Len(t, got.Tools, 1)
}
// TestDecodeRequest_FunctionMessage verifies a legacy "function" role message
// decodes as a canonical tool message.
func TestDecodeRequest_FunctionMessage(t *testing.T) {
	body := []byte(`{
		"model": "gpt-4",
		"messages": [
			{"role": "user", "content": "hi"},
			{"role": "function", "name": "get_weather", "content": "sunny"}
		]
	}`)
	got, err := decodeRequest(body)
	require.NoError(t, err)
	require.Len(t, got.Messages, 2)
	assert.Equal(t, canonical.RoleTool, got.Messages[1].Role)
}
// TestDecodeRequest_StopString verifies a scalar "stop" value becomes a
// one-element stop-sequence slice.
func TestDecodeRequest_StopString(t *testing.T) {
	got, err := decodeRequest([]byte(`{"model":"gpt-4","messages":[{"role":"user","content":"hi"}],"stop":"END"}`))
	require.NoError(t, err)
	assert.Equal(t, []string{"END"}, got.Parameters.StopSequences)
}
// TestDecodeRequest_StopEmptyString verifies an empty scalar "stop" value is
// dropped rather than producing [""].
func TestDecodeRequest_StopEmptyString(t *testing.T) {
	got, err := decodeRequest([]byte(`{"model":"gpt-4","messages":[{"role":"user","content":"hi"}],"stop":""}`))
	require.NoError(t, err)
	assert.Nil(t, got.Parameters.StopSequences)
}
// TestDecodeResponse_EmptyChoices verifies a response with zero choices still
// yields a canonical response containing one empty text block.
func TestDecodeResponse_EmptyChoices(t *testing.T) {
	got, err := decodeResponse([]byte(`{"id":"resp-1","model":"gpt-4","choices":[],"usage":{"prompt_tokens":0,"completion_tokens":0,"total_tokens":0}}`))
	require.NoError(t, err)
	assert.Equal(t, "resp-1", got.ID)
	require.Len(t, got.Content, 1)
	assert.Equal(t, "", got.Content[0].Text)
}
// TestDecodeResponse_FunctionCallFinishReason verifies the legacy
// "function_call" finish reason maps to the canonical tool_use stop reason.
func TestDecodeResponse_FunctionCallFinishReason(t *testing.T) {
	body := []byte(`{
		"id":"r1","model":"gpt-4",
		"choices":[{"index":0,"message":{"role":"assistant","content":"ok"},"finish_reason":"function_call"}],
		"usage":{"prompt_tokens":1,"completion_tokens":1,"total_tokens":2}
	}`)
	got, err := decodeResponse(body)
	require.NoError(t, err)
	assert.Equal(t, canonical.StopReasonToolUse, *got.StopReason)
}
// TestEncodeRequest_DisabledThinking verifies a disabled thinking config maps
// back to reasoning_effort "none".
func TestEncodeRequest_DisabledThinking(t *testing.T) {
	in := &canonical.CanonicalRequest{
		Model:    "gpt-4",
		Messages: []canonical.CanonicalMessage{{Role: canonical.RoleUser, Content: []canonical.ContentBlock{canonical.NewTextBlock("hi")}}},
		Thinking: &canonical.ThinkingConfig{Type: "disabled"},
	}
	body, err := encodeRequest(in, conversion.NewTargetProvider("", "key", "model"))
	require.NoError(t, err)

	var got map[string]any
	require.NoError(t, json.Unmarshal(body, &got))
	assert.Equal(t, "none", got["reasoning_effort"])
}
// TestEncodeRequest_OutputFormat_JSONObject verifies a canonical json_object
// output format is emitted as OpenAI's response_format.
func TestEncodeRequest_OutputFormat_JSONObject(t *testing.T) {
	in := &canonical.CanonicalRequest{
		Model:        "gpt-4",
		Messages:     []canonical.CanonicalMessage{{Role: canonical.RoleUser, Content: []canonical.ContentBlock{canonical.NewTextBlock("hi")}}},
		OutputFormat: &canonical.OutputFormat{Type: "json_object"},
	}
	body, err := encodeRequest(in, conversion.NewTargetProvider("", "key", "model"))
	require.NoError(t, err)

	var got map[string]any
	require.NoError(t, json.Unmarshal(body, &got))
	rf, ok := got["response_format"].(map[string]any)
	require.True(t, ok)
	assert.Equal(t, "json_object", rf["type"])
}
// TestEncodeRequest_PublicFields verifies UserID and ParallelToolUse map to
// the OpenAI "user" and "parallel_tool_calls" fields.
func TestEncodeRequest_PublicFields(t *testing.T) {
	parallel := true
	in := &canonical.CanonicalRequest{
		Model:           "gpt-4",
		Messages:        []canonical.CanonicalMessage{{Role: canonical.RoleUser, Content: []canonical.ContentBlock{canonical.NewTextBlock("hi")}}},
		UserID:          "user-123",
		ParallelToolUse: &parallel,
	}
	body, err := encodeRequest(in, conversion.NewTargetProvider("", "key", "model"))
	require.NoError(t, err)

	var got map[string]any
	require.NoError(t, json.Unmarshal(body, &got))
	assert.Equal(t, "user-123", got["user"])
	assert.Equal(t, true, got["parallel_tool_calls"])
}
// TestEncodeResponse_UsageWithCacheAndReasoning verifies cache-read and
// reasoning token counts are emitted in prompt_tokens_details and
// completion_tokens_details respectively.
func TestEncodeResponse_UsageWithCacheAndReasoning(t *testing.T) {
	cache := 80
	reasoning := 20
	sr := canonical.StopReasonEndTurn
	resp := &canonical.CanonicalResponse{
		ID:         "r1",
		Model:      "gpt-4",
		Content:    []canonical.ContentBlock{canonical.NewTextBlock("ok")},
		StopReason: &sr,
		Usage: canonical.CanonicalUsage{
			InputTokens:     100,
			OutputTokens:    50,
			CacheReadTokens: &cache,
			ReasoningTokens: &reasoning,
		},
	}
	body, err := encodeResponse(resp)
	require.NoError(t, err)
	var result map[string]any
	require.NoError(t, json.Unmarshal(body, &result))
	// Checked type assertion: the original `result["usage"].(map[string]any)`
	// would panic on a shape regression instead of failing the test cleanly.
	usage, ok := result["usage"].(map[string]any)
	require.True(t, ok, "usage should serialize as a JSON object")
	assert.Equal(t, float64(100), usage["prompt_tokens"])
	ptd, ok := usage["prompt_tokens_details"].(map[string]any)
	require.True(t, ok)
	assert.Equal(t, float64(80), ptd["cached_tokens"])
	ctd, ok := usage["completion_tokens_details"].(map[string]any)
	require.True(t, ok)
	assert.Equal(t, float64(20), ctd["reasoning_tokens"])
}
// TestEncodeResponse_StopReasons is a table test covering every canonical
// stop reason and its expected OpenAI finish_reason mapping.
func TestEncodeResponse_StopReasons(t *testing.T) {
	tests := []struct {
		name       string
		stopReason canonical.StopReason
		want       string
	}{
		{"end_turn→stop", canonical.StopReasonEndTurn, "stop"},
		{"max_tokens→length", canonical.StopReasonMaxTokens, "length"},
		{"tool_use→tool_calls", canonical.StopReasonToolUse, "tool_calls"},
		{"content_filter→content_filter", canonical.StopReasonContentFilter, "content_filter"},
		{"stop_sequence→stop", canonical.StopReasonStopSequence, "stop"},
		{"refusal→stop", canonical.StopReasonRefusal, "stop"},
	}
	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			sr := tt.stopReason
			resp := &canonical.CanonicalResponse{
				ID:         "r1",
				Model:      "gpt-4",
				Content:    []canonical.ContentBlock{canonical.NewTextBlock("ok")},
				StopReason: &sr,
				Usage:      canonical.CanonicalUsage{},
			}
			body, err := encodeResponse(resp)
			require.NoError(t, err)
			var result map[string]any
			require.NoError(t, json.Unmarshal(body, &result))
			// Checked assertions: the original unchecked `.([]any)` and
			// `.(map[string]any)` would panic the subtest on a shape
			// regression instead of failing it cleanly.
			choices, ok := result["choices"].([]any)
			require.True(t, ok, "choices should serialize as a JSON array")
			require.NotEmpty(t, choices)
			choice, ok := choices[0].(map[string]any)
			require.True(t, ok, "each choice should be a JSON object")
			assert.Equal(t, tt.want, choice["finish_reason"])
		})
	}
}
// TestMapErrorCode_AllCodes pins the mapping of every conversion error code
// onto the two OpenAI error types: client-side problems map to
// "invalid_request_error", internal/conversion failures to "server_error".
func TestMapErrorCode_AllCodes(t *testing.T) {
	// Client-side / request-shape problems.
	assert.Equal(t, "invalid_request_error", mapErrorCode(conversion.ErrorCodeInvalidInput))
	assert.Equal(t, "invalid_request_error", mapErrorCode(conversion.ErrorCodeMissingRequiredField))
	assert.Equal(t, "invalid_request_error", mapErrorCode(conversion.ErrorCodeIncompatibleFeature))
	assert.Equal(t, "invalid_request_error", mapErrorCode(conversion.ErrorCodeFieldMappingFailure))
	assert.Equal(t, "invalid_request_error", mapErrorCode(conversion.ErrorCodeToolCallParseError))
	assert.Equal(t, "invalid_request_error", mapErrorCode(conversion.ErrorCodeJSONParseError))
	assert.Equal(t, "invalid_request_error", mapErrorCode(conversion.ErrorCodeProtocolConstraint))
	// Internal engine/encoding failures.
	assert.Equal(t, "server_error", mapErrorCode(conversion.ErrorCodeStreamStateError))
	assert.Equal(t, "server_error", mapErrorCode(conversion.ErrorCodeUTF8DecodeError))
	assert.Equal(t, "server_error", mapErrorCode(conversion.ErrorCodeEncodingFailure))
	assert.Equal(t, "server_error", mapErrorCode(conversion.ErrorCodeInterfaceNotSupported))
}

View File

@@ -1,6 +1,7 @@
package conversion
import (
"fmt"
"testing"
"nex/backend/internal/conversion/canonical"
@@ -128,3 +129,71 @@ func TestCanonicalStreamConverter_EmptyDecoder(t *testing.T) {
assert.Nil(t, result)
}
// TestCanonicalStreamConverter_MiddlewareError_Continue verifies that a
// middleware error drops the offending event instead of aborting the stream.
func TestCanonicalStreamConverter_MiddlewareError_Continue(t *testing.T) {
	evt := canonical.NewMessageStartEvent("id-1", "gpt-4")
	dec := &mockStreamDecoder{chunks: [][]canonical.CanonicalStreamEvent{{evt}}}
	enc := &mockStreamEncoder{events: [][]byte{[]byte("data: ok\n\n")}}

	mw := NewMiddlewareChain()
	mw.Use(&errorMiddleware{})
	ctx := NewConversionContext(InterfaceTypeChat)

	conv := NewCanonicalStreamConverterWithMiddleware(dec, enc, mw, *ctx, "openai", "anthropic")
	out := conv.ProcessChunk([]byte("raw"))
	assert.Nil(t, out, "middleware error should cause the event to be skipped (continue)")
}
// TestCanonicalStreamConverter_Flush_MiddlewareError_Continue verifies that
// when middleware rejects the decoder's flushed event, Flush still returns
// the encoder's own flush output (the failed event is skipped, not fatal).
func TestCanonicalStreamConverter_Flush_MiddlewareError_Continue(t *testing.T) {
	event := canonical.NewMessageStartEvent("id-1", "gpt-4")
	decoder := &mockStreamDecoder{
		flush: []canonical.CanonicalStreamEvent{event},
	}
	encoder := &mockStreamEncoder{
		events: [][]byte{[]byte("data: ok\n\n")},
		flush: [][]byte{[]byte("data: encoder_flush\n\n")},
	}
	chain := NewMiddlewareChain()
	chain.Use(&errorMiddleware{})
	ctx := NewConversionContext(InterfaceTypeChat)
	converter := NewCanonicalStreamConverterWithMiddleware(decoder, encoder, chain, *ctx, "openai", "anthropic")
	result := converter.Flush()
	// Only the encoder's flush chunk survives; the decoder's event was
	// dropped by the erroring middleware.
	assert.Len(t, result, 1)
	assert.Equal(t, []byte("data: encoder_flush\n\n"), result[0])
}
// TestCanonicalStreamConverter_Flush_DecoderAndEncoderBothProduce verifies
// Flush drains decoder events first, then appends the encoder's own flush
// output, preserving order.
func TestCanonicalStreamConverter_Flush_DecoderAndEncoderBothProduce(t *testing.T) {
	evt := canonical.NewMessageStartEvent("id-1", "gpt-4")
	dec := &mockStreamDecoder{flush: []canonical.CanonicalStreamEvent{evt}}
	enc := &mockStreamEncoder{
		events: [][]byte{[]byte("data: decoder_flush\n\n")},
		flush:  [][]byte{[]byte("data: encoder_flush\n\n")},
	}

	out := NewCanonicalStreamConverter(dec, enc).Flush()
	require.Len(t, out, 2)
	assert.Equal(t, []byte("data: decoder_flush\n\n"), out[0])
	assert.Equal(t, []byte("data: encoder_flush\n\n"), out[1])
}
// errorMiddleware is a Middleware stub that fails every interception; tests
// use it to prove that middleware errors are non-fatal to the stream.
type errorMiddleware struct{}

// Intercept always fails, simulating a request middleware error.
func (m *errorMiddleware) Intercept(req *canonical.CanonicalRequest, clientProtocol, providerProtocol string, ctx *ConversionContext) (*canonical.CanonicalRequest, error) {
	return nil, fmt.Errorf("middleware error")
}

// InterceptStreamEvent always fails, simulating a stream middleware error.
func (m *errorMiddleware) InterceptStreamEvent(event *canonical.CanonicalStreamEvent, clientProtocol, providerProtocol string, ctx *ConversionContext) (*canonical.CanonicalStreamEvent, error) {
	return nil, fmt.Errorf("stream middleware error")
}

View File

@@ -0,0 +1,165 @@
package handler
import (
"bytes"
"encoding/json"
"net/http/httptest"
"testing"
"github.com/gin-gonic/gin"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
"nex/backend/internal/domain"
)
// TestProviderHandler_CreateProvider_Success posts a valid provider and
// expects a 201 with the API key masked in the echoed payload.
func TestProviderHandler_CreateProvider_Success(t *testing.T) {
	h := NewProviderHandler(&mockProviderService{})
	payload, _ := json.Marshal(map[string]string{
		"id":       "p1",
		"name":     "Test",
		"api_key":  "sk-test",
		"base_url": "https://api.test.com",
	})

	rec := httptest.NewRecorder()
	c, _ := gin.CreateTestContext(rec)
	c.Request = httptest.NewRequest("POST", "/api/providers", bytes.NewReader(payload))
	c.Request.Header.Set("Content-Type", "application/json")

	h.CreateProvider(c)

	assert.Equal(t, 201, rec.Code)
	var created domain.Provider
	require.NoError(t, json.Unmarshal(rec.Body.Bytes(), &created))
	assert.Equal(t, "p1", created.ID)
	assert.Contains(t, created.APIKey, "***")
}
// TestProviderHandler_CreateProvider_WithProtocol verifies that an explicit
// protocol field is accepted on creation.
func TestProviderHandler_CreateProvider_WithProtocol(t *testing.T) {
	h := NewProviderHandler(&mockProviderService{})
	payload, _ := json.Marshal(map[string]string{
		"id":       "p1",
		"name":     "Test",
		"api_key":  "sk-test",
		"base_url": "https://api.test.com",
		"protocol": "anthropic",
	})

	rec := httptest.NewRecorder()
	c, _ := gin.CreateTestContext(rec)
	c.Request = httptest.NewRequest("POST", "/api/providers", bytes.NewReader(payload))
	c.Request.Header.Set("Content-Type", "application/json")

	h.CreateProvider(c)
	assert.Equal(t, 201, rec.Code)
}
// TestProviderHandler_UpdateProvider verifies a valid PUT on an existing
// provider returns 200.
func TestProviderHandler_UpdateProvider(t *testing.T) {
	h := NewProviderHandler(&mockProviderService{
		provider: &domain.Provider{ID: "p1", Name: "Updated", APIKey: "***"},
	})
	payload, _ := json.Marshal(map[string]string{"name": "Updated"})

	rec := httptest.NewRecorder()
	c, _ := gin.CreateTestContext(rec)
	c.Params = gin.Params{{Key: "id", Value: "p1"}}
	c.Request = httptest.NewRequest("PUT", "/api/providers/p1", bytes.NewReader(payload))
	c.Request.Header.Set("Content-Type", "application/json")

	h.UpdateProvider(c)
	assert.Equal(t, 200, rec.Code)
}
// TestProviderHandler_UpdateProvider_InvalidBody verifies a PUT without a
// request body is rejected with 400.
func TestProviderHandler_UpdateProvider_InvalidBody(t *testing.T) {
	h := NewProviderHandler(&mockProviderService{})

	rec := httptest.NewRecorder()
	c, _ := gin.CreateTestContext(rec)
	c.Params = gin.Params{{Key: "id", Value: "p1"}}
	c.Request = httptest.NewRequest("PUT", "/api/providers/p1", nil)

	h.UpdateProvider(c)
	assert.Equal(t, 400, rec.Code)
}
// TestProviderHandler_DeleteProvider verifies deletion succeeds; the handler
// may reply 204 (no content) or 200.
func TestProviderHandler_DeleteProvider(t *testing.T) {
	h := NewProviderHandler(&mockProviderService{})

	rec := httptest.NewRecorder()
	c, _ := gin.CreateTestContext(rec)
	c.Params = gin.Params{{Key: "id", Value: "p1"}}
	c.Request = httptest.NewRequest("DELETE", "/api/providers/p1", bytes.NewReader([]byte{}))
	c.Request.Header.Set("Content-Type", "application/json")

	h.DeleteProvider(c)
	assert.True(t, rec.Code == 204 || rec.Code == 200)
}
// TestModelHandler_DeleteModel verifies model deletion succeeds; the handler
// may reply 204 (no content) or 200.
func TestModelHandler_DeleteModel(t *testing.T) {
	h := NewModelHandler(&mockModelService{})

	rec := httptest.NewRecorder()
	c, _ := gin.CreateTestContext(rec)
	c.Params = gin.Params{{Key: "id", Value: "m1"}}
	c.Request = httptest.NewRequest("DELETE", "/api/models/m1", bytes.NewReader([]byte{}))
	c.Request.Header.Set("Content-Type", "application/json")

	h.DeleteModel(c)
	assert.True(t, rec.Code == 204 || rec.Code == 200)
}
// TestModelHandler_CreateModel_Success posts a valid model and expects 201
// with the created model echoed back.
func TestModelHandler_CreateModel_Success(t *testing.T) {
	h := NewModelHandler(&mockModelService{})
	payload, _ := json.Marshal(map[string]string{
		"id":          "m1",
		"provider_id": "p1",
		"model_name":  "gpt-4",
	})

	rec := httptest.NewRecorder()
	c, _ := gin.CreateTestContext(rec)
	c.Request = httptest.NewRequest("POST", "/api/models", bytes.NewReader(payload))
	c.Request.Header.Set("Content-Type", "application/json")

	h.CreateModel(c)

	assert.Equal(t, 201, rec.Code)
	var created domain.Model
	require.NoError(t, json.Unmarshal(rec.Body.Bytes(), &created))
	assert.Equal(t, "m1", created.ID)
}
// TestModelHandler_GetModel verifies fetching an existing model returns 200
// and the stored model name.
func TestModelHandler_GetModel(t *testing.T) {
	h := NewModelHandler(&mockModelService{
		model: &domain.Model{ID: "m1", ModelName: "gpt-4"},
	})

	rec := httptest.NewRecorder()
	c, _ := gin.CreateTestContext(rec)
	c.Params = gin.Params{{Key: "id", Value: "m1"}}
	c.Request = httptest.NewRequest("GET", "/api/models/m1", nil)

	h.GetModel(c)

	assert.Equal(t, 200, rec.Code)
	var got domain.Model
	require.NoError(t, json.Unmarshal(rec.Body.Bytes(), &got))
	assert.Equal(t, "gpt-4", got.ModelName)
}
// TestModelHandler_UpdateModel verifies a valid PUT on an existing model
// returns 200.
func TestModelHandler_UpdateModel(t *testing.T) {
	h := NewModelHandler(&mockModelService{
		model: &domain.Model{ID: "m1", ModelName: "gpt-4o"},
	})
	payload, _ := json.Marshal(map[string]string{"model_name": "gpt-4o"})

	rec := httptest.NewRecorder()
	c, _ := gin.CreateTestContext(rec)
	c.Params = gin.Params{{Key: "id", Value: "m1"}}
	c.Request = httptest.NewRequest("PUT", "/api/models/m1", bytes.NewReader(payload))
	c.Request.Header.Set("Content-Type", "application/json")

	h.UpdateModel(c)
	assert.Equal(t, 200, rec.Code)
}

View File

@@ -0,0 +1,761 @@
package handler
import (
"bytes"
"context"
"encoding/json"
"fmt"
"net/http/httptest"
"testing"
"time"
"github.com/gin-gonic/gin"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
"nex/backend/internal/conversion"
"nex/backend/internal/conversion/anthropic"
"nex/backend/internal/conversion/openai"
"nex/backend/internal/domain"
"nex/backend/internal/provider"
appErrors "nex/backend/pkg/errors"
)
// Put gin into test mode for the whole package so handlers don't emit debug
// logging during tests.
func init() {
	gin.SetMode(gin.TestMode)
}
// mockProxyProviderClient is a stub provider client: Send/SendStream delegate
// to the injected functions when set, otherwise they return a canned OpenAI
// chat completion (non-stream) or a short SSE sequence ending in [DONE].
type mockProxyProviderClient struct {
	sendFn func(ctx context.Context, spec conversion.HTTPRequestSpec) (*conversion.HTTPResponseSpec, error)
	sendStreamFn func(ctx context.Context, spec conversion.HTTPRequestSpec) (<-chan provider.StreamEvent, error)
}

// Send returns the injected sendFn's result, or a fixed successful chat
// completion response.
func (m *mockProxyProviderClient) Send(ctx context.Context, spec conversion.HTTPRequestSpec) (*conversion.HTTPResponseSpec, error) {
	if m.sendFn != nil {
		return m.sendFn(ctx, spec)
	}
	return &conversion.HTTPResponseSpec{
		StatusCode: 200,
		Headers: map[string]string{"Content-Type": "application/json"},
		Body: []byte(`{"id":"resp-1","model":"gpt-4","choices":[{"index":0,"message":{"role":"assistant","content":"hi"},"finish_reason":"stop"}],"usage":{"prompt_tokens":1,"completion_tokens":1,"total_tokens":2}}`),
	}, nil
}

// SendStream returns the injected sendStreamFn's result, or a buffered channel
// carrying one content delta, the [DONE] sentinel, and a Done marker. The
// channel is closed by the producing goroutine once all events are sent.
func (m *mockProxyProviderClient) SendStream(ctx context.Context, spec conversion.HTTPRequestSpec) (<-chan provider.StreamEvent, error) {
	if m.sendStreamFn != nil {
		return m.sendStreamFn(ctx, spec)
	}
	ch := make(chan provider.StreamEvent, 10)
	go func() {
		defer close(ch)
		ch <- provider.StreamEvent{Data: []byte("data: {\"id\":\"1\",\"choices\":[{\"delta\":{\"content\":\"hi\"}}]}\n\n")}
		ch <- provider.StreamEvent{Data: []byte("data: [DONE]\n\n")}
		ch <- provider.StreamEvent{Done: true}
	}()
	return ch, nil
}
// mockProxyRoutingService returns a fixed RouteResult/error pair from Route.
type mockProxyRoutingService struct {
	result *domain.RouteResult
	err    error
}

// Route ignores modelName and returns the canned result and error.
func (m *mockProxyRoutingService) Route(modelName string) (*domain.RouteResult, error) {
	return m.result, m.err
}
// mockProxyProviderService satisfies the provider-service interface; only
// List is configurable, the remaining methods are inert no-ops.
type mockProxyProviderService struct {
	providers []domain.Provider
	err       error
}

func (m *mockProxyProviderService) Create(p *domain.Provider) error { return nil }
func (m *mockProxyProviderService) Get(id string, maskKey bool) (*domain.Provider, error) {
	return nil, nil
}

// List returns the configured provider slice and error.
func (m *mockProxyProviderService) List() ([]domain.Provider, error) { return m.providers, m.err }
func (m *mockProxyProviderService) Update(id string, updates map[string]interface{}) error {
	return nil
}
func (m *mockProxyProviderService) Delete(id string) error { return nil }
// mockProxyStatsService is a no-op stats recorder/reader used so proxy
// tests do not depend on real usage accounting.
type mockProxyStatsService struct{}

func (m *mockProxyStatsService) Record(providerID, modelName string) error { return nil }
func (m *mockProxyStatsService) Get(providerID, modelName string, startDate, endDate *time.Time) ([]domain.UsageStats, error) {
	return nil, nil
}
func (m *mockProxyStatsService) Aggregate(stats []domain.UsageStats, groupBy string) []map[string]interface{} {
	return nil
}
// setupProxyEngine builds a ConversionEngine backed by an in-memory
// registry that has only the OpenAI adapter registered.
func setupProxyEngine(t *testing.T) *conversion.ConversionEngine {
	t.Helper()
	reg := conversion.NewMemoryRegistry()
	eng := conversion.NewConversionEngine(reg)
	require.NoError(t, reg.Register(openai.NewAdapter()))
	return eng
}
// newTestProxyHandler wires a ProxyHandler from the given collaborators,
// always backing it with a stub stats service.
func newTestProxyHandler(engine *conversion.ConversionEngine, client *mockProxyProviderClient, routingSvc *mockProxyRoutingService, providerSvc *mockProxyProviderService) *ProxyHandler {
	statsSvc := &mockProxyStatsService{}
	return NewProxyHandler(engine, client, routingSvc, providerSvc, statsSvc)
}
// TestProxyHandler_HandleProxy_MissingProtocol expects HTTP 400 when the
// request carries no :protocol route parameter.
func TestProxyHandler_HandleProxy_MissingProtocol(t *testing.T) {
	h := newTestProxyHandler(setupProxyEngine(t), &mockProxyProviderClient{}, &mockProxyRoutingService{}, &mockProxyProviderService{})
	rec := httptest.NewRecorder()
	ctx, _ := gin.CreateTestContext(rec)
	ctx.Request = httptest.NewRequest("POST", "/", bytes.NewReader([]byte(`{}`)))
	h.HandleProxy(ctx)
	assert.Equal(t, 400, rec.Code)
}
// TestProxyHandler_HandleProxy_NonStreamSuccess drives a routed,
// non-streaming chat completion through the handler and checks that the
// upstream JSON is relayed back with status 200 and the upstream id intact.
func TestProxyHandler_HandleProxy_NonStreamSuccess(t *testing.T) {
	engine := setupProxyEngine(t)
	// Routing resolves to an enabled OpenAI provider/model pair.
	routingSvc := &mockProxyRoutingService{
		result: &domain.RouteResult{
			Provider: &domain.Provider{ID: "p1", Name: "Test", APIKey: "sk-test", BaseURL: "https://api.test.com", Protocol: "openai", Enabled: true},
			Model:    &domain.Model{ID: "m1", ProviderID: "p1", ModelName: "gpt-4", Enabled: true},
		},
	}
	client := &mockProxyProviderClient{
		sendFn: func(ctx context.Context, spec conversion.HTTPRequestSpec) (*conversion.HTTPResponseSpec, error) {
			return &conversion.HTTPResponseSpec{
				StatusCode: 200,
				Headers:    map[string]string{"Content-Type": "application/json"},
				Body:       []byte(`{"id":"resp-1","object":"chat.completion","model":"gpt-4","choices":[{"index":0,"message":{"role":"assistant","content":"Hello"},"finish_reason":"stop"}],"usage":{"prompt_tokens":5,"completion_tokens":3,"total_tokens":8}}`),
			}, nil
		},
	}
	h := newTestProxyHandler(engine, client, routingSvc, &mockProxyProviderService{})
	w := httptest.NewRecorder()
	c, _ := gin.CreateTestContext(w)
	c.Params = gin.Params{{Key: "protocol", Value: "openai"}, {Key: "path", Value: "/chat/completions"}}
	c.Request = httptest.NewRequest("POST", "/openai/v1/chat/completions", bytes.NewReader([]byte(`{"model":"gpt-4","messages":[{"role":"user","content":"hi"}]}`)))
	h.HandleProxy(c)
	assert.Equal(t, 200, w.Code)
	var resp map[string]any
	require.NoError(t, json.Unmarshal(w.Body.Bytes(), &resp))
	// The converted response keeps the upstream response id.
	assert.Equal(t, "resp-1", resp["id"])
}
// TestProxyHandler_HandleProxy_RoutingError_WithBody: routing fails with
// ErrModelNotFound and the empty provider service offers no passthrough
// target, so the handler answers 404.
func TestProxyHandler_HandleProxy_RoutingError_WithBody(t *testing.T) {
	engine := setupProxyEngine(t)
	routingSvc := &mockProxyRoutingService{err: appErrors.ErrModelNotFound}
	h := newTestProxyHandler(engine, &mockProxyProviderClient{}, routingSvc, &mockProxyProviderService{})
	w := httptest.NewRecorder()
	c, _ := gin.CreateTestContext(w)
	c.Params = gin.Params{{Key: "protocol", Value: "openai"}, {Key: "path", Value: "/chat/completions"}}
	c.Request = httptest.NewRequest("POST", "/openai/v1/chat/completions", bytes.NewReader([]byte(`{"model":"unknown","messages":[{"role":"user","content":"hi"}]}`)))
	h.HandleProxy(c)
	assert.Equal(t, 404, w.Code)
}
// TestProxyHandler_HandleProxy_ConversionError: the provider client fails
// with context.DeadlineExceeded on a routed non-stream request and the
// handler maps this to HTTP 500.
func TestProxyHandler_HandleProxy_ConversionError(t *testing.T) {
	engine := setupProxyEngine(t)
	routingSvc := &mockProxyRoutingService{
		result: &domain.RouteResult{
			Provider: &domain.Provider{ID: "p1", Name: "Test", APIKey: "sk-test", BaseURL: "https://api.test.com", Protocol: "openai", Enabled: true},
			Model:    &domain.Model{ID: "m1", ProviderID: "p1", ModelName: "gpt-4", Enabled: true},
		},
	}
	client := &mockProxyProviderClient{
		sendFn: func(ctx context.Context, spec conversion.HTTPRequestSpec) (*conversion.HTTPResponseSpec, error) {
			return nil, context.DeadlineExceeded
		},
	}
	h := newTestProxyHandler(engine, client, routingSvc, &mockProxyProviderService{})
	w := httptest.NewRecorder()
	c, _ := gin.CreateTestContext(w)
	c.Params = gin.Params{{Key: "protocol", Value: "openai"}, {Key: "path", Value: "/chat/completions"}}
	c.Request = httptest.NewRequest("POST", "/openai/v1/chat/completions", bytes.NewReader([]byte(`{"model":"gpt-4","messages":[{"role":"user","content":"hi"}]}`)))
	h.HandleProxy(c)
	assert.Equal(t, 500, w.Code)
}
// TestProxyHandler_HandleProxy_ClientSendError: provider client's Send
// returns context.DeadlineExceeded; handler answers 500.
// NOTE(review): this test's setup and assertion are identical to
// TestProxyHandler_HandleProxy_ConversionError above — consider merging
// or differentiating the two scenarios.
func TestProxyHandler_HandleProxy_ClientSendError(t *testing.T) {
	engine := setupProxyEngine(t)
	routingSvc := &mockProxyRoutingService{
		result: &domain.RouteResult{
			Provider: &domain.Provider{ID: "p1", Name: "Test", APIKey: "sk-test", BaseURL: "https://api.test.com", Protocol: "openai", Enabled: true},
			Model:    &domain.Model{ID: "m1", ProviderID: "p1", ModelName: "gpt-4", Enabled: true},
		},
	}
	client := &mockProxyProviderClient{
		sendFn: func(ctx context.Context, spec conversion.HTTPRequestSpec) (*conversion.HTTPResponseSpec, error) {
			return nil, context.DeadlineExceeded
		},
	}
	h := newTestProxyHandler(engine, client, routingSvc, &mockProxyProviderService{})
	w := httptest.NewRecorder()
	c, _ := gin.CreateTestContext(w)
	c.Params = gin.Params{{Key: "protocol", Value: "openai"}, {Key: "path", Value: "/chat/completions"}}
	c.Request = httptest.NewRequest("POST", "/openai/v1/chat/completions", bytes.NewReader([]byte(`{"model":"gpt-4","messages":[{"role":"user","content":"hi"}]}`)))
	h.HandleProxy(c)
	assert.Equal(t, 500, w.Code)
}
// TestProxyHandler_HandleProxy_StreamSuccess streams two OpenAI chunk
// events plus [DONE] through the handler and checks the SSE content type
// and that the delta text reaches the response body.
func TestProxyHandler_HandleProxy_StreamSuccess(t *testing.T) {
	engine := setupProxyEngine(t)
	routingSvc := &mockProxyRoutingService{
		result: &domain.RouteResult{
			Provider: &domain.Provider{ID: "p1", Name: "Test", APIKey: "sk-test", BaseURL: "https://api.test.com", Protocol: "openai", Enabled: true},
			Model:    &domain.Model{ID: "m1", ProviderID: "p1", ModelName: "gpt-4", Enabled: true},
		},
	}
	client := &mockProxyProviderClient{
		sendStreamFn: func(ctx context.Context, spec conversion.HTTPRequestSpec) (<-chan provider.StreamEvent, error) {
			ch := make(chan provider.StreamEvent, 10)
			go func() {
				defer close(ch)
				// role chunk, content chunk, terminator, done signal
				ch <- provider.StreamEvent{Data: []byte("data: {\"id\":\"1\",\"object\":\"chat.completion.chunk\",\"choices\":[{\"index\":0,\"delta\":{\"role\":\"assistant\"}}]}\n\n")}
				ch <- provider.StreamEvent{Data: []byte("data: {\"id\":\"1\",\"object\":\"chat.completion.chunk\",\"choices\":[{\"index\":0,\"delta\":{\"content\":\"Hello\"}}]}\n\n")}
				ch <- provider.StreamEvent{Data: []byte("data: [DONE]\n\n")}
				ch <- provider.StreamEvent{Done: true}
			}()
			return ch, nil
		},
	}
	h := newTestProxyHandler(engine, client, routingSvc, &mockProxyProviderService{})
	w := httptest.NewRecorder()
	c, _ := gin.CreateTestContext(w)
	c.Params = gin.Params{{Key: "protocol", Value: "openai"}, {Key: "path", Value: "/chat/completions"}}
	c.Request = httptest.NewRequest("POST", "/openai/v1/chat/completions", bytes.NewReader([]byte(`{"model":"gpt-4","messages":[{"role":"user","content":"hi"}],"stream":true}`)))
	h.HandleProxy(c)
	assert.Equal(t, 200, w.Code)
	assert.Equal(t, "text/event-stream", w.Header().Get("Content-Type"))
	assert.Contains(t, w.Body.String(), "Hello")
}
// TestProxyHandler_HandleProxy_StreamError: SendStream fails before any
// event is produced, so the handler answers 500 instead of starting SSE.
func TestProxyHandler_HandleProxy_StreamError(t *testing.T) {
	engine := setupProxyEngine(t)
	routingSvc := &mockProxyRoutingService{
		result: &domain.RouteResult{
			Provider: &domain.Provider{ID: "p1", Name: "Test", APIKey: "sk-test", BaseURL: "https://api.test.com", Protocol: "openai", Enabled: true},
			Model:    &domain.Model{ID: "m1", ProviderID: "p1", ModelName: "gpt-4", Enabled: true},
		},
	}
	client := &mockProxyProviderClient{
		sendStreamFn: func(ctx context.Context, spec conversion.HTTPRequestSpec) (<-chan provider.StreamEvent, error) {
			return nil, context.DeadlineExceeded
		},
	}
	h := newTestProxyHandler(engine, client, routingSvc, &mockProxyProviderService{})
	w := httptest.NewRecorder()
	c, _ := gin.CreateTestContext(w)
	c.Params = gin.Params{{Key: "protocol", Value: "openai"}, {Key: "path", Value: "/chat/completions"}}
	c.Request = httptest.NewRequest("POST", "/openai/v1/chat/completions", bytes.NewReader([]byte(`{"model":"gpt-4","messages":[{"role":"user","content":"hi"}],"stream":true}`)))
	h.HandleProxy(c)
	assert.Equal(t, 500, w.Code)
}
// TestProxyHandler_ForwardPassthrough_GET: when routing fails with
// ErrModelNotFound but a matching-protocol provider exists, a GET to a
// non-chat path is passed through and the upstream 200 is returned.
func TestProxyHandler_ForwardPassthrough_GET(t *testing.T) {
	engine := setupProxyEngine(t)
	routingSvc := &mockProxyRoutingService{err: appErrors.ErrModelNotFound}
	providerSvc := &mockProxyProviderService{
		providers: []domain.Provider{
			{ID: "p1", Name: "Test", APIKey: "sk-test", BaseURL: "https://api.test.com", Protocol: "openai"},
		},
	}
	client := &mockProxyProviderClient{
		sendFn: func(ctx context.Context, spec conversion.HTTPRequestSpec) (*conversion.HTTPResponseSpec, error) {
			return &conversion.HTTPResponseSpec{
				StatusCode: 200,
				Headers:    map[string]string{"Content-Type": "application/json"},
				Body:       []byte(`{"object":"list","data":[{"id":"gpt-4","object":"model"}]}`),
			}, nil
		},
	}
	h := newTestProxyHandler(engine, client, routingSvc, providerSvc)
	w := httptest.NewRecorder()
	c, _ := gin.CreateTestContext(w)
	c.Params = gin.Params{{Key: "protocol", Value: "openai"}, {Key: "path", Value: "/models"}}
	c.Request = httptest.NewRequest("GET", "/openai/v1/models", nil)
	h.HandleProxy(c)
	assert.Equal(t, 200, w.Code)
}
// TestProxyHandler_ForwardPassthrough_UnsupportedProtocol expects HTTP 400
// when routing fails and the URL's protocol segment is not registered.
func TestProxyHandler_ForwardPassthrough_UnsupportedProtocol(t *testing.T) {
	routing := &mockProxyRoutingService{err: appErrors.ErrModelNotFound}
	providers := &mockProxyProviderService{}
	h := newTestProxyHandler(setupProxyEngine(t), &mockProxyProviderClient{}, routing, providers)
	rec := httptest.NewRecorder()
	ctx, _ := gin.CreateTestContext(rec)
	ctx.Params = gin.Params{{Key: "protocol", Value: "unknown"}, {Key: "path", Value: "/models"}}
	ctx.Request = httptest.NewRequest("GET", "/unknown/v1/models", nil)
	h.HandleProxy(ctx)
	assert.Equal(t, 400, rec.Code)
}
// TestProxyHandler_ForwardPassthrough_NoProviders expects HTTP 404 when
// routing fails and no provider is configured for the protocol.
func TestProxyHandler_ForwardPassthrough_NoProviders(t *testing.T) {
	routing := &mockProxyRoutingService{err: appErrors.ErrModelNotFound}
	providers := &mockProxyProviderService{providers: []domain.Provider{}}
	h := newTestProxyHandler(setupProxyEngine(t), &mockProxyProviderClient{}, routing, providers)
	rec := httptest.NewRecorder()
	ctx, _ := gin.CreateTestContext(rec)
	ctx.Params = gin.Params{{Key: "protocol", Value: "openai"}, {Key: "path", Value: "/models"}}
	ctx.Request = httptest.NewRequest("GET", "/openai/v1/models", nil)
	h.HandleProxy(ctx)
	assert.Equal(t, 404, rec.Code)
}
// TestExtractModelName checks model-name extraction from raw JSON request
// bodies, including the missing-model, empty-body, and escaped-quote cases.
func TestExtractModelName(t *testing.T) {
	cases := []struct {
		name string
		body string
		want string
	}{
		{"basic", `{"model":"gpt-4","messages":[]}`, "gpt-4"},
		{"nested", `{"stream":true,"model":"claude-3","messages":[]}`, "claude-3"},
		{"no_model", `{"messages":[]}`, ""},
		{"empty", "", ""},
		// The raw escape sequence is preserved (no JSON unescaping).
		{"escaped", `{"model":"gpt\"4","messages":[]}`, `gpt\"4`},
	}
	for _, tc := range cases {
		t.Run(tc.name, func(t *testing.T) {
			assert.Equal(t, tc.want, extractModelName([]byte(tc.body)))
		})
	}
}
// TestExtractHeaders verifies that incoming request headers are copied
// into the plain string map returned by extractHeaders.
func TestExtractHeaders(t *testing.T) {
	rec := httptest.NewRecorder()
	ctx, _ := gin.CreateTestContext(rec)
	req := httptest.NewRequest("POST", "/", nil)
	req.Header.Set("Authorization", "Bearer test")
	req.Header.Set("Content-Type", "application/json")
	ctx.Request = req
	got := extractHeaders(ctx)
	assert.Equal(t, "Bearer test", got["Authorization"])
	assert.Equal(t, "application/json", got["Content-Type"])
}
// TestIsStreamRequest covers the basic stream-detection matrix: the
// "stream":true flag only counts on chat-completion paths.
func TestIsStreamRequest(t *testing.T) {
	engine := setupProxyEngine(t)
	h := newTestProxyHandler(engine, &mockProxyProviderClient{}, &mockProxyRoutingService{}, &mockProxyProviderService{})
	tests := []struct {
		name     string
		body     string
		path     string
		expected bool
	}{
		{"stream true chat", `{"model":"gpt-4","stream":true}`, "/v1/chat/completions", true},
		{"stream false chat", `{"model":"gpt-4","stream":false}`, "/v1/chat/completions", false},
		{"no stream field", `{"model":"gpt-4"}`, "/v1/chat/completions", false},
		// Non-chat paths never stream, even with stream:true.
		{"stream true non-chat", `{"model":"gpt-4","stream":true}`, "/v1/models", false},
	}
	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			result := h.isStreamRequest([]byte(tt.body), "openai", tt.path)
			assert.Equal(t, tt.expected, result)
		})
	}
}
// TestProxyHandler_HandleProxy_ProviderProtocolDefault: the routed
// provider has an empty Protocol field; the request still completes with
// 200 (presumably the handler falls back to a default protocol — only the
// status is asserted here).
func TestProxyHandler_HandleProxy_ProviderProtocolDefault(t *testing.T) {
	engine := setupProxyEngine(t)
	routingSvc := &mockProxyRoutingService{
		result: &domain.RouteResult{
			Provider: &domain.Provider{ID: "p1", Name: "Test", APIKey: "sk-test", BaseURL: "https://api.test.com", Protocol: "", Enabled: true},
			Model:    &domain.Model{ID: "m1", ProviderID: "p1", ModelName: "gpt-4", Enabled: true},
		},
	}
	client := &mockProxyProviderClient{
		sendFn: func(ctx context.Context, spec conversion.HTTPRequestSpec) (*conversion.HTTPResponseSpec, error) {
			return &conversion.HTTPResponseSpec{
				StatusCode: 200,
				Body:       []byte(`{"id":"r1","object":"chat.completion","model":"gpt-4","choices":[{"index":0,"message":{"role":"assistant","content":"ok"},"finish_reason":"stop"}],"usage":{"prompt_tokens":1,"completion_tokens":1,"total_tokens":2}}`),
			}, nil
		},
	}
	h := newTestProxyHandler(engine, client, routingSvc, &mockProxyProviderService{})
	w := httptest.NewRecorder()
	c, _ := gin.CreateTestContext(w)
	c.Params = gin.Params{{Key: "protocol", Value: "openai"}, {Key: "path", Value: "/chat/completions"}}
	c.Request = httptest.NewRequest("POST", "/openai/v1/chat/completions", bytes.NewReader([]byte(`{"model":"gpt-4","messages":[{"role":"user","content":"hi"}]}`)))
	h.HandleProxy(c)
	assert.Equal(t, 200, w.Code)
}
// TestProxyHandler_WriteConversionError_NonConversionError: a plain error
// (not a ConversionError) is written as HTTP 500.
func TestProxyHandler_WriteConversionError_NonConversionError(t *testing.T) {
	h := newTestProxyHandler(setupProxyEngine(t), &mockProxyProviderClient{}, &mockProxyRoutingService{}, &mockProxyProviderService{})
	rec := httptest.NewRecorder()
	ctx, _ := gin.CreateTestContext(rec)
	ctx.Request = httptest.NewRequest("POST", "/", nil)
	h.writeConversionError(ctx, context.DeadlineExceeded, "openai")
	assert.Equal(t, 500, rec.Code)
}
// TestProxyHandler_WriteConversionError_ConversionError: a typed
// ConversionError with ErrorCodeInvalidInput is still written as HTTP 500
// (pins current behavior; a 400 mapping would require a handler change).
func TestProxyHandler_WriteConversionError_ConversionError(t *testing.T) {
	engine := setupProxyEngine(t)
	h := newTestProxyHandler(engine, &mockProxyProviderClient{}, &mockProxyRoutingService{}, &mockProxyProviderService{})
	w := httptest.NewRecorder()
	c, _ := gin.CreateTestContext(w)
	c.Request = httptest.NewRequest("POST", "/", nil)
	convErr := conversion.NewConversionError(conversion.ErrorCodeInvalidInput, "bad request")
	h.writeConversionError(c, convErr, "openai")
	assert.Equal(t, 500, w.Code)
}
// TestProxyHandler_HandleProxy_EmptyBody: a bodyless GET with failed
// routing falls back to passthrough against the configured provider and
// relays the upstream 200.
func TestProxyHandler_HandleProxy_EmptyBody(t *testing.T) {
	engine := setupProxyEngine(t)
	routingSvc := &mockProxyRoutingService{err: appErrors.ErrModelNotFound}
	providerSvc := &mockProxyProviderService{
		providers: []domain.Provider{
			{ID: "p1", Name: "Test", APIKey: "sk-test", BaseURL: "https://api.test.com", Protocol: "openai"},
		},
	}
	client := &mockProxyProviderClient{
		sendFn: func(ctx context.Context, spec conversion.HTTPRequestSpec) (*conversion.HTTPResponseSpec, error) {
			return &conversion.HTTPResponseSpec{
				StatusCode: 200,
				Body:       []byte(`{"object":"list","data":[]}`),
			}, nil
		},
	}
	h := newTestProxyHandler(engine, client, routingSvc, providerSvc)
	w := httptest.NewRecorder()
	c, _ := gin.CreateTestContext(w)
	c.Params = gin.Params{{Key: "protocol", Value: "openai"}, {Key: "path", Value: "/models"}}
	c.Request = httptest.NewRequest("GET", "/openai/v1/models", nil)
	h.HandleProxy(c)
	assert.Equal(t, 200, w.Code)
}
// TestProxyHandler_HandleStream_MidStreamError: the stream yields one data
// chunk and then an error. Since SSE headers were already written, the
// status stays 200 and the chunk delivered before the error is present.
func TestProxyHandler_HandleStream_MidStreamError(t *testing.T) {
	engine := setupProxyEngine(t)
	routingSvc := &mockProxyRoutingService{
		result: &domain.RouteResult{
			Provider: &domain.Provider{ID: "p1", Name: "Test", APIKey: "sk-test", BaseURL: "https://api.test.com", Protocol: "openai", Enabled: true},
			Model:    &domain.Model{ID: "m1", ProviderID: "p1", ModelName: "gpt-4", Enabled: true},
		},
	}
	client := &mockProxyProviderClient{
		sendStreamFn: func(ctx context.Context, spec conversion.HTTPRequestSpec) (<-chan provider.StreamEvent, error) {
			ch := make(chan provider.StreamEvent, 10)
			go func() {
				defer close(ch)
				ch <- provider.StreamEvent{Data: []byte("data: {\"id\":\"1\",\"object\":\"chat.completion.chunk\",\"choices\":[{\"index\":0,\"delta\":{\"content\":\"Hello\"}}]}\n\n")}
				// Simulated transport failure mid-stream.
				ch <- provider.StreamEvent{Error: fmt.Errorf("connection reset by peer")}
			}()
			return ch, nil
		},
	}
	h := newTestProxyHandler(engine, client, routingSvc, &mockProxyProviderService{})
	w := httptest.NewRecorder()
	c, _ := gin.CreateTestContext(w)
	c.Params = gin.Params{{Key: "protocol", Value: "openai"}, {Key: "path", Value: "/chat/completions"}}
	c.Request = httptest.NewRequest("POST", "/openai/v1/chat/completions", bytes.NewReader([]byte(`{"model":"gpt-4","messages":[{"role":"user","content":"hi"}],"stream":true}`)))
	h.HandleProxy(c)
	assert.Equal(t, 200, w.Code)
	assert.Equal(t, "text/event-stream", w.Header().Get("Content-Type"))
	body := w.Body.String()
	assert.Contains(t, body, "Hello")
}
// TestProxyHandler_HandleStream_FlushOutput checks the full SSE response
// contract on a successful stream: status 200, event-stream/no-cache/
// keep-alive headers, and both the delta text and [DONE] in the body.
func TestProxyHandler_HandleStream_FlushOutput(t *testing.T) {
	engine := setupProxyEngine(t)
	routingSvc := &mockProxyRoutingService{
		result: &domain.RouteResult{
			Provider: &domain.Provider{ID: "p1", Name: "Test", APIKey: "sk-test", BaseURL: "https://api.test.com", Protocol: "openai", Enabled: true},
			Model:    &domain.Model{ID: "m1", ProviderID: "p1", ModelName: "gpt-4", Enabled: true},
		},
	}
	client := &mockProxyProviderClient{
		sendStreamFn: func(ctx context.Context, spec conversion.HTTPRequestSpec) (<-chan provider.StreamEvent, error) {
			ch := make(chan provider.StreamEvent, 10)
			go func() {
				defer close(ch)
				ch <- provider.StreamEvent{Data: []byte("data: {\"id\":\"1\",\"object\":\"chat.completion.chunk\",\"choices\":[{\"index\":0,\"delta\":{\"content\":\"Hi\"}}]}\n\n")}
				ch <- provider.StreamEvent{Data: []byte("data: [DONE]\n\n")}
				ch <- provider.StreamEvent{Done: true}
			}()
			return ch, nil
		},
	}
	h := newTestProxyHandler(engine, client, routingSvc, &mockProxyProviderService{})
	w := httptest.NewRecorder()
	c, _ := gin.CreateTestContext(w)
	c.Params = gin.Params{{Key: "protocol", Value: "openai"}, {Key: "path", Value: "/chat/completions"}}
	c.Request = httptest.NewRequest("POST", "/openai/v1/chat/completions", bytes.NewReader([]byte(`{"model":"gpt-4","messages":[{"role":"user","content":"hi"}],"stream":true}`)))
	h.HandleProxy(c)
	assert.Equal(t, 200, w.Code)
	assert.Equal(t, "text/event-stream", w.Header().Get("Content-Type"))
	assert.Equal(t, "no-cache", w.Header().Get("Cache-Control"))
	assert.Equal(t, "keep-alive", w.Header().Get("Connection"))
	body := w.Body.String()
	assert.Contains(t, body, "Hi")
	assert.Contains(t, body, "[DONE]")
}
// TestProxyHandler_HandleStream_CreateStreamConverterError: the routed
// provider's protocol ("nonexistent") has no registered adapter, so the
// stream request fails with HTTP 500 before any SSE output.
func TestProxyHandler_HandleStream_CreateStreamConverterError(t *testing.T) {
	registry := conversion.NewMemoryRegistry()
	engine := conversion.NewConversionEngine(registry)
	err := registry.Register(openai.NewAdapter())
	require.NoError(t, err)
	routingSvc := &mockProxyRoutingService{
		result: &domain.RouteResult{
			Provider: &domain.Provider{ID: "p1", Name: "Test", APIKey: "sk-test", BaseURL: "https://api.test.com", Protocol: "nonexistent", Enabled: true},
			Model:    &domain.Model{ID: "m1", ProviderID: "p1", ModelName: "gpt-4", Enabled: true},
		},
	}
	h := newTestProxyHandler(engine, &mockProxyProviderClient{}, routingSvc, &mockProxyProviderService{})
	w := httptest.NewRecorder()
	c, _ := gin.CreateTestContext(w)
	c.Params = gin.Params{{Key: "protocol", Value: "openai"}, {Key: "path", Value: "/chat/completions"}}
	c.Request = httptest.NewRequest("POST", "/openai/v1/chat/completions", bytes.NewReader([]byte(`{"model":"gpt-4","messages":[{"role":"user","content":"hi"}],"stream":true}`)))
	h.HandleProxy(c)
	assert.Equal(t, 500, w.Code)
}
// TestProxyHandler_HandleStream_ConvertRequestError: same unregistered
// provider protocol as the previous test, asserting a 500 on the stream
// path. NOTE(review): the setup is effectively identical to
// TestProxyHandler_HandleStream_CreateStreamConverterError — verify both
// scenarios are actually distinct in the handler.
func TestProxyHandler_HandleStream_ConvertRequestError(t *testing.T) {
	registry := conversion.NewMemoryRegistry()
	engine := conversion.NewConversionEngine(registry)
	require.NoError(t, registry.Register(openai.NewAdapter()))
	routingSvc := &mockProxyRoutingService{
		result: &domain.RouteResult{
			Provider: &domain.Provider{ID: "p1", Name: "Test", APIKey: "sk-test", BaseURL: "https://api.test.com", Protocol: "nonexistent", Enabled: true},
			Model:    &domain.Model{ID: "m1", ProviderID: "p1", ModelName: "gpt-4", Enabled: true},
		},
	}
	h := newTestProxyHandler(engine, &mockProxyProviderClient{}, routingSvc, &mockProxyProviderService{})
	w := httptest.NewRecorder()
	c, _ := gin.CreateTestContext(w)
	c.Params = gin.Params{{Key: "protocol", Value: "openai"}, {Key: "path", Value: "/chat/completions"}}
	c.Request = httptest.NewRequest("POST", "/openai/v1/chat/completions", bytes.NewReader([]byte(`{"model":"gpt-4","messages":[{"role":"user","content":"hi"}],"stream":true}`)))
	h.HandleProxy(c)
	assert.Equal(t, 500, w.Code)
}
// TestProxyHandler_HandleNonStream_ConvertResponseError: a cross-protocol
// (openai client -> anthropic provider) request whose upstream body is not
// valid JSON cannot be converted back, so the handler answers 500.
func TestProxyHandler_HandleNonStream_ConvertResponseError(t *testing.T) {
	registry := conversion.NewMemoryRegistry()
	engine := conversion.NewConversionEngine(registry)
	require.NoError(t, registry.Register(openai.NewAdapter()))
	require.NoError(t, registry.Register(anthropic.NewAdapter()))
	routingSvc := &mockProxyRoutingService{
		result: &domain.RouteResult{
			Provider: &domain.Provider{ID: "p1", Name: "Test", APIKey: "sk-test", BaseURL: "https://api.test.com", Protocol: "anthropic", Enabled: true},
			Model:    &domain.Model{ID: "m1", ProviderID: "p1", ModelName: "claude-3", Enabled: true},
		},
	}
	client := &mockProxyProviderClient{
		sendFn: func(ctx context.Context, spec conversion.HTTPRequestSpec) (*conversion.HTTPResponseSpec, error) {
			return &conversion.HTTPResponseSpec{
				StatusCode: 200,
				Headers:    map[string]string{"Content-Type": "application/json"},
				Body:       []byte(`invalid json`),
			}, nil
		},
	}
	h := newTestProxyHandler(engine, client, routingSvc, &mockProxyProviderService{})
	w := httptest.NewRecorder()
	c, _ := gin.CreateTestContext(w)
	c.Params = gin.Params{{Key: "protocol", Value: "openai"}, {Key: "path", Value: "/chat/completions"}}
	c.Request = httptest.NewRequest("POST", "/openai/v1/chat/completions", bytes.NewReader([]byte(`{"model":"claude-3","messages":[{"role":"user","content":"hi"}]}`)))
	h.HandleProxy(c)
	assert.Equal(t, 500, w.Code)
}
// TestProxyHandler_HandleNonStream_ResponseHeaders verifies that custom
// upstream headers (X-Custom) and Content-Type are copied onto the proxied
// response.
func TestProxyHandler_HandleNonStream_ResponseHeaders(t *testing.T) {
	engine := setupProxyEngine(t)
	routingSvc := &mockProxyRoutingService{
		result: &domain.RouteResult{
			Provider: &domain.Provider{ID: "p1", Name: "Test", APIKey: "sk-test", BaseURL: "https://api.test.com", Protocol: "openai", Enabled: true},
			Model:    &domain.Model{ID: "m1", ProviderID: "p1", ModelName: "gpt-4", Enabled: true},
		},
	}
	client := &mockProxyProviderClient{
		sendFn: func(ctx context.Context, spec conversion.HTTPRequestSpec) (*conversion.HTTPResponseSpec, error) {
			return &conversion.HTTPResponseSpec{
				StatusCode: 200,
				Headers:    map[string]string{"Content-Type": "application/json", "X-Custom": "test-value"},
				Body:       []byte(`{"id":"r1","object":"chat.completion","model":"gpt-4","choices":[{"index":0,"message":{"role":"assistant","content":"ok"},"finish_reason":"stop"}],"usage":{"prompt_tokens":1,"completion_tokens":1,"total_tokens":2}}`),
			}, nil
		},
	}
	h := newTestProxyHandler(engine, client, routingSvc, &mockProxyProviderService{})
	w := httptest.NewRecorder()
	c, _ := gin.CreateTestContext(w)
	c.Params = gin.Params{{Key: "protocol", Value: "openai"}, {Key: "path", Value: "/chat/completions"}}
	c.Request = httptest.NewRequest("POST", "/openai/v1/chat/completions", bytes.NewReader([]byte(`{"model":"gpt-4","messages":[{"role":"user","content":"hi"}]}`)))
	h.HandleProxy(c)
	assert.Equal(t, 200, w.Code)
	assert.Equal(t, "test-value", w.Header().Get("X-Custom"))
	assert.Equal(t, "application/json", w.Header().Get("Content-Type"))
}
// TestProxyHandler_ForwardPassthrough_CrossProtocol: routing fails, the
// only configured provider speaks anthropic while the inbound URL says
// openai, and with both adapters registered the passthrough still returns
// the upstream 200.
func TestProxyHandler_ForwardPassthrough_CrossProtocol(t *testing.T) {
	registry := conversion.NewMemoryRegistry()
	engine := conversion.NewConversionEngine(registry)
	require.NoError(t, registry.Register(openai.NewAdapter()))
	anthropicAdapter := anthropic.NewAdapter()
	require.NoError(t, registry.Register(anthropicAdapter))
	routingSvc := &mockProxyRoutingService{err: appErrors.ErrModelNotFound}
	providerSvc := &mockProxyProviderService{
		providers: []domain.Provider{
			{ID: "p1", Name: "Test", APIKey: "sk-test", BaseURL: "https://api.test.com", Protocol: "anthropic"},
		},
	}
	client := &mockProxyProviderClient{
		sendFn: func(ctx context.Context, spec conversion.HTTPRequestSpec) (*conversion.HTTPResponseSpec, error) {
			return &conversion.HTTPResponseSpec{
				StatusCode: 200,
				Headers:    map[string]string{"Content-Type": "application/json"},
				Body:       []byte(`{"object":"list","data":[]}`),
			}, nil
		},
	}
	h := newTestProxyHandler(engine, client, routingSvc, providerSvc)
	w := httptest.NewRecorder()
	c, _ := gin.CreateTestContext(w)
	c.Params = gin.Params{{Key: "protocol", Value: "openai"}, {Key: "path", Value: "/models"}}
	c.Request = httptest.NewRequest("GET", "/openai/v1/models", nil)
	h.HandleProxy(c)
	assert.Equal(t, 200, w.Code)
}
// TestProxyHandler_ForwardPassthrough_NoBody_NoModel: a GET with no body
// (hence no extractable model) plus failed routing takes the passthrough
// path and relays the upstream 200.
// NOTE(review): setup overlaps heavily with TestProxyHandler_ForwardPassthrough_GET
// and TestProxyHandler_HandleProxy_EmptyBody.
func TestProxyHandler_ForwardPassthrough_NoBody_NoModel(t *testing.T) {
	engine := setupProxyEngine(t)
	routingSvc := &mockProxyRoutingService{err: appErrors.ErrModelNotFound}
	providerSvc := &mockProxyProviderService{
		providers: []domain.Provider{
			{ID: "p1", Name: "Test", APIKey: "sk-test", BaseURL: "https://api.test.com", Protocol: "openai"},
		},
	}
	client := &mockProxyProviderClient{
		sendFn: func(ctx context.Context, spec conversion.HTTPRequestSpec) (*conversion.HTTPResponseSpec, error) {
			return &conversion.HTTPResponseSpec{
				StatusCode: 200,
				Headers:    map[string]string{"Content-Type": "application/json"},
				Body:       []byte(`{"object":"list","data":[{"id":"gpt-4","object":"model"}]}`),
			}, nil
		},
	}
	h := newTestProxyHandler(engine, client, routingSvc, providerSvc)
	w := httptest.NewRecorder()
	c, _ := gin.CreateTestContext(w)
	c.Params = gin.Params{{Key: "protocol", Value: "openai"}, {Key: "path", Value: "/models"}}
	c.Request = httptest.NewRequest("GET", "/openai/v1/models", nil)
	h.HandleProxy(c)
	assert.Equal(t, 200, w.Code)
}
// TestIsStreamRequest_EdgeCases stresses the stream flag detection:
// placement at the end of the JSON, extra whitespace, the literal text
// "stream:true" inside a string value, an empty body, and non-chat paths.
func TestIsStreamRequest_EdgeCases(t *testing.T) {
	engine := setupProxyEngine(t)
	h := newTestProxyHandler(engine, &mockProxyProviderClient{}, &mockProxyRoutingService{}, &mockProxyProviderService{})
	tests := []struct {
		name     string
		body     string
		path     string
		expected bool
	}{
		{"stream at end of JSON", `{"messages":[],"stream":true}`, "/v1/chat/completions", true},
		{"stream with spaces", `{"stream" : true}`, "/v1/chat/completions", true},
		// Must not be fooled by "stream" text inside a string value.
		{"stream embedded in string value", `{"model":"stream:true"}`, "/v1/chat/completions", false},
		{"empty body", "", "/v1/chat/completions", false},
		{"stream true embeddings", `{"model":"text-emb","stream":true}`, "/v1/embeddings", false},
	}
	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			result := h.isStreamRequest([]byte(tt.body), "openai", tt.path)
			assert.Equal(t, tt.expected, result)
		})
	}
}
// TestProxyHandler_WriteError_RouteError: writeError maps a
// "model not found" error to HTTP 404.
func TestProxyHandler_WriteError_RouteError(t *testing.T) {
	h := newTestProxyHandler(setupProxyEngine(t), &mockProxyProviderClient{}, &mockProxyRoutingService{}, &mockProxyProviderService{})
	rec := httptest.NewRecorder()
	ctx, _ := gin.CreateTestContext(rec)
	ctx.Request = httptest.NewRequest("POST", "/", nil)
	h.writeError(ctx, fmt.Errorf("model not found"), "openai")
	assert.Equal(t, 404, rec.Code)
}
// TestProxyHandler_HandleProxy_RouteEmptyBody_NoModel: bodyless GET with
// failed routing takes the passthrough path; the upstream 200 is relayed.
// NOTE(review): near-duplicate of TestProxyHandler_ForwardPassthrough_NoBody_NoModel.
func TestProxyHandler_HandleProxy_RouteEmptyBody_NoModel(t *testing.T) {
	engine := setupProxyEngine(t)
	routingSvc := &mockProxyRoutingService{err: appErrors.ErrModelNotFound}
	providerSvc := &mockProxyProviderService{
		providers: []domain.Provider{
			{ID: "p1", Name: "Test", APIKey: "sk-test", BaseURL: "https://api.test.com", Protocol: "openai"},
		},
	}
	client := &mockProxyProviderClient{
		sendFn: func(ctx context.Context, spec conversion.HTTPRequestSpec) (*conversion.HTTPResponseSpec, error) {
			return &conversion.HTTPResponseSpec{
				StatusCode: 200,
				Body:       []byte(`{"object":"list","data":[]}`),
			}, nil
		},
	}
	h := newTestProxyHandler(engine, client, routingSvc, providerSvc)
	w := httptest.NewRecorder()
	c, _ := gin.CreateTestContext(w)
	c.Params = gin.Params{{Key: "protocol", Value: "openai"}, {Key: "path", Value: "/models"}}
	c.Request = httptest.NewRequest("GET", "/openai/v1/models", nil)
	h.HandleProxy(c)
	assert.Equal(t, 200, w.Code)
}

View File

@@ -173,22 +173,22 @@ func (c *Client) readStream(ctx context.Context, cancel context.CancelFunc, body
}
n, err := body.Read(buf)
if n > 0 {
dataBuf = append(dataBuf, buf[:n]...)
}
if err != nil {
if err == io.EOF {
if err != io.EOF {
if isNetworkError(err) {
c.logger.Error("流网络错误", zap.String("error", err.Error()))
eventChan <- StreamEvent{Error: fmt.Errorf("网络错误: %w", err)}
} else {
c.logger.Error("流读取错误", zap.String("error", err.Error()))
eventChan <- StreamEvent{Error: fmt.Errorf("读取错误: %w", err)}
}
return
}
if isNetworkError(err) {
c.logger.Error("流网络错误", zap.String("error", err.Error()))
eventChan <- StreamEvent{Error: fmt.Errorf("网络错误: %w", err)}
} else {
c.logger.Error("流读取错误", zap.String("error", err.Error()))
eventChan <- StreamEvent{Error: fmt.Errorf("读取错误: %w", err)}
}
return
}
dataBuf = append(dataBuf, buf[:n]...)
if len(dataBuf) > bufSize/2 && bufSize < c.streamCfg.MaxBufferSize {
newSize := bufSize * 2
if newSize > c.streamCfg.MaxBufferSize {
@@ -214,6 +214,10 @@ func (c *Client) readStream(ctx context.Context, cancel context.CancelFunc, body
eventChan <- StreamEvent{Data: rawEvent}
}
if err == io.EOF {
return
}
}
}

View File

@@ -6,6 +6,7 @@ import (
"net/http"
"net/http/httptest"
"testing"
"time"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
@@ -129,6 +130,184 @@ func TestClient_SendStream_ErrorResponse(t *testing.T) {
assert.Error(t, err)
}
// TestClient_SendStream_SSEEvents streams two SSE data chunks plus a
// [DONE] marker from a stub server and verifies the client surfaces
// exactly two data events and one Done event.
func TestClient_SendStream_SSEEvents(t *testing.T) {
	server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
		w.Header().Set("Content-Type", "text/event-stream")
		w.WriteHeader(http.StatusOK)
		flusher, ok := w.(http.Flusher)
		require.True(t, ok)
		w.Write([]byte("data: {\"id\":\"1\",\"choices\":[{\"delta\":{\"content\":\"Hello\"}}]}\n\n"))
		flusher.Flush()
		w.Write([]byte("data: {\"id\":\"1\",\"choices\":[{\"delta\":{\" World\"}}]}\n\n"))
		flusher.Flush()
		// Brief pauses give the reader time to consume each flush before
		// the terminator is sent and the connection closes.
		time.Sleep(50 * time.Millisecond)
		w.Write([]byte("data: [DONE]\n\n"))
		flusher.Flush()
		time.Sleep(50 * time.Millisecond)
	}))
	defer server.Close()
	client := NewClient()
	spec := conversion.HTTPRequestSpec{
		URL:     server.URL + "/v1/chat/completions",
		Method:  "POST",
		Headers: map[string]string{"Authorization": "Bearer test-key"},
		Body:    []byte(`{"model":"gpt-4","messages":[],"stream":true}`),
	}
	eventChan, err := client.SendStream(context.Background(), spec)
	require.NoError(t, err)
	var dataEvents [][]byte
	var doneEvents int
	for event := range eventChan {
		if event.Done {
			doneEvents++
		} else if event.Error != nil {
			t.Fatalf("unexpected error: %v", event.Error)
		} else {
			dataEvents = append(dataEvents, event.Data)
		}
	}
	// Use require (not assert) so the indexed accesses below cannot panic
	// with an index-out-of-range when fewer than two events arrive — the
	// test then fails cleanly at this line instead.
	require.Len(t, dataEvents, 2, "expected exactly 2 data events from SSE stream")
	assert.Contains(t, string(dataEvents[0]), "Hello")
	assert.Contains(t, string(dataEvents[1]), "World")
	assert.Equal(t, 1, doneEvents)
}
func TestClient_SendStream_ContextCancellation(t *testing.T) {
	// Server that never writes data, so the client can only exit via ctx.
	server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
		w.Header().Set("Content-Type", "text/event-stream")
		w.WriteHeader(http.StatusOK)
		time.Sleep(10 * time.Second)
	}))
	defer server.Close()

	ctx, cancel := context.WithCancel(context.Background())
	c := NewClient()
	req := conversion.HTTPRequestSpec{
		URL:     server.URL + "/v1/chat/completions",
		Method:  "POST",
		Headers: map[string]string{"Authorization": "Bearer test-key"},
		Body:    []byte(`{}`),
	}
	ch, err := c.SendStream(ctx, req)
	require.NoError(t, err)
	cancel()

	// Cancelling the context must surface at least one error event.
	sawError := false
	for ev := range ch {
		if ev.Error != nil {
			sawError = true
		}
	}
	assert.True(t, sawError)
}
func TestClient_Send_EmptyBody(t *testing.T) {
	// A GET request spec without a body must still round-trip cleanly.
	server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
		assert.Equal(t, "GET", r.Method)
		w.WriteHeader(http.StatusOK)
		w.Write([]byte(`{"result":"ok"}`))
	}))
	defer server.Close()

	c := NewClient()
	resp, err := c.Send(context.Background(), conversion.HTTPRequestSpec{
		URL:     server.URL + "/v1/models",
		Method:  "GET",
		Headers: map[string]string{"Authorization": "Bearer test-key"},
	})
	require.NoError(t, err)
	assert.Equal(t, 200, resp.StatusCode)
	assert.Contains(t, string(resp.Body), "ok")
}
func TestClient_SendStream_SlowSSE(t *testing.T) {
	// Upstream that pauses between the data frame and the [DONE] sentinel.
	server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
		w.Header().Set("Content-Type", "text/event-stream")
		w.WriteHeader(http.StatusOK)
		flusher, ok := w.(http.Flusher)
		require.True(t, ok)
		w.Write([]byte("data: {\"id\":\"1\"}\n\n"))
		flusher.Flush()
		time.Sleep(100 * time.Millisecond)
		w.Write([]byte("data: [DONE]\n\n"))
		flusher.Flush()
		time.Sleep(100 * time.Millisecond)
	}))
	defer server.Close()

	c := NewClient()
	req := conversion.HTTPRequestSpec{
		URL:     server.URL + "/v1/chat/completions",
		Method:  "POST",
		Headers: map[string]string{"Authorization": "Bearer test-key"},
		Body:    []byte(`{}`),
	}
	ch, err := c.SendStream(context.Background(), req)
	require.NoError(t, err)

	// Tally events by kind; the delay must not duplicate or drop any.
	data, done := 0, 0
	for ev := range ch {
		switch {
		case ev.Done:
			done++
		case ev.Error != nil:
			t.Fatalf("unexpected error: %v", ev.Error)
		default:
			data++
		}
	}
	assert.Equal(t, 1, data, "expected exactly 1 data event from slow SSE")
	assert.Equal(t, 1, done, "expected exactly 1 done event from slow SSE")
}
func TestClient_SendStream_SplitSSEEvents(t *testing.T) {
	// Two SSE events delivered inside a single write must be split apart.
	server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
		w.Header().Set("Content-Type", "text/event-stream")
		w.WriteHeader(http.StatusOK)
		flusher, ok := w.(http.Flusher)
		require.True(t, ok)
		w.Write([]byte("data: {\"id\":\"1\"}\n\ndata: {\"id\":\"2\"}\n\n"))
		flusher.Flush()
		time.Sleep(50 * time.Millisecond)
		w.Write([]byte("data: [DONE]\n\n"))
		flusher.Flush()
		time.Sleep(50 * time.Millisecond)
	}))
	defer server.Close()

	c := NewClient()
	ch, err := c.SendStream(context.Background(), conversion.HTTPRequestSpec{
		URL:     server.URL + "/v1/chat/completions",
		Method:  "POST",
		Headers: map[string]string{"Authorization": "Bearer test-key"},
		Body:    []byte(`{}`),
	})
	require.NoError(t, err)

	data, done := 0, 0
	for ev := range ch {
		if ev.Done {
			done++
		} else {
			data++
		}
	}
	assert.Equal(t, 2, data, "expected exactly 2 data events from split SSE")
	assert.Equal(t, 1, done)
}
func TestIsNetworkError(t *testing.T) {
tests := []struct {
input string
@@ -147,3 +326,42 @@ func TestIsNetworkError(t *testing.T) {
assert.Equal(t, tt.want, isNetworkError(err), "isNetworkError(%q)", tt.input)
}
}
// TestClient_SendStream_MidStreamNetworkError verifies that data delivered
// before an abrupt connection drop still reaches the consumer. The server
// hijacks and closes the TCP connection mid-stream; any resulting error
// event is tolerated, but the frame sent beforehand must have arrived.
func TestClient_SendStream_MidStreamNetworkError(t *testing.T) {
	server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
		w.Header().Set("Content-Type", "text/event-stream")
		w.WriteHeader(http.StatusOK)
		flusher, ok := w.(http.Flusher)
		require.True(t, ok)
		w.Write([]byte("data: {\"id\":\"1\"}\n\n"))
		flusher.Flush()
		time.Sleep(50 * time.Millisecond)
		// Kill the underlying TCP connection to simulate a network failure.
		if hijacker, ok := w.(http.Hijacker); ok {
			conn, _, _ := hijacker.Hijack()
			if conn != nil {
				conn.Close()
			}
		}
	}))
	defer server.Close()
	client := NewClient()
	spec := conversion.HTTPRequestSpec{
		URL:     server.URL + "/v1/chat/completions",
		Method:  "POST",
		Headers: map[string]string{"Authorization": "Bearer test-key"},
		Body:    []byte(`{}`),
	}
	eventChan, err := client.SendStream(context.Background(), spec)
	require.NoError(t, err)
	var gotData bool
	for event := range eventChan {
		// Error events are expected after the drop; count only clean data
		// frames. (Replaces a previous empty no-op branch on event.Error.)
		if event.Error == nil && !event.Done {
			gotData = true
		}
	}
	assert.True(t, gotData, "should have received at least one data event before error")
}

View File

@@ -0,0 +1,154 @@
package service
import (
"testing"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
"nex/backend/internal/domain"
"nex/backend/internal/repository"
)
func TestProviderService_Update(t *testing.T) {
	// Updating an existing provider must persist the new field values.
	db := setupServiceTestDB(t)
	svc := NewProviderService(repository.NewProviderRepository(db))
	svc.Create(&domain.Provider{ID: "p1", Name: "Original", APIKey: "key", BaseURL: "https://test.com"})

	require.NoError(t, svc.Update("p1", map[string]interface{}{"name": "Updated"}))

	got, err := svc.Get("p1", false)
	require.NoError(t, err)
	assert.Equal(t, "Updated", got.Name)
}
func TestProviderService_Update_NotFound(t *testing.T) {
	// Updating a provider that does not exist must fail.
	db := setupServiceTestDB(t)
	svc := NewProviderService(repository.NewProviderRepository(db))
	assert.Error(t, svc.Update("nonexistent", map[string]interface{}{"name": "test"}))
}
func TestModelService_Get(t *testing.T) {
	// A created model must be retrievable by its ID.
	db := setupServiceTestDB(t)
	providers := repository.NewProviderRepository(db)
	svc := NewModelService(repository.NewModelRepository(db), providers)
	providers.Create(&domain.Provider{ID: "p1", Name: "P1", APIKey: "key", BaseURL: "https://test.com"})
	svc.Create(&domain.Model{ID: "m1", ProviderID: "p1", ModelName: "gpt-4"})

	got, err := svc.Get("m1")
	require.NoError(t, err)
	assert.Equal(t, "gpt-4", got.ModelName)
}
func TestModelService_Update(t *testing.T) {
	// A field update on an existing model must be persisted.
	db := setupServiceTestDB(t)
	providers := repository.NewProviderRepository(db)
	svc := NewModelService(repository.NewModelRepository(db), providers)
	providers.Create(&domain.Provider{ID: "p1", Name: "P1", APIKey: "key", BaseURL: "https://test.com"})
	svc.Create(&domain.Model{ID: "m1", ProviderID: "p1", ModelName: "gpt-4"})

	require.NoError(t, svc.Update("m1", map[string]interface{}{"model_name": "gpt-4o"}))

	got, err := svc.Get("m1")
	require.NoError(t, err)
	assert.Equal(t, "gpt-4o", got.ModelName)
}
func TestModelService_Update_ProviderID_Invalid(t *testing.T) {
	// Re-pointing a model at an unknown provider must be rejected.
	db := setupServiceTestDB(t)
	providers := repository.NewProviderRepository(db)
	svc := NewModelService(repository.NewModelRepository(db), providers)
	providers.Create(&domain.Provider{ID: "p1", Name: "P1", APIKey: "key", BaseURL: "https://test.com"})
	svc.Create(&domain.Model{ID: "m1", ProviderID: "p1", ModelName: "gpt-4"})

	assert.Error(t, svc.Update("m1", map[string]interface{}{"provider_id": "nonexistent"}))
}
func TestModelService_Delete(t *testing.T) {
	// After deletion the model must no longer be retrievable.
	db := setupServiceTestDB(t)
	providers := repository.NewProviderRepository(db)
	svc := NewModelService(repository.NewModelRepository(db), providers)
	providers.Create(&domain.Provider{ID: "p1", Name: "P1", APIKey: "key", BaseURL: "https://test.com"})
	svc.Create(&domain.Model{ID: "m1", ProviderID: "p1", ModelName: "gpt-4"})

	require.NoError(t, svc.Delete("m1"))

	_, err := svc.Get("m1")
	assert.Error(t, err)
}
func TestModelService_Delete_NotFound(t *testing.T) {
	// Deleting an unknown model ID must fail.
	db := setupServiceTestDB(t)
	svc := NewModelService(repository.NewModelRepository(db), repository.NewProviderRepository(db))
	assert.Error(t, svc.Delete("nonexistent"))
}
func TestStatsService_Aggregate_ByModel(t *testing.T) {
statsRepo := repository.NewStatsRepository(nil)
svc := NewStatsService(statsRepo)
stats := []domain.UsageStats{
{ProviderID: "p1", ModelName: "gpt-4", RequestCount: 10},
{ProviderID: "p1", ModelName: "gpt-4", RequestCount: 5},
{ProviderID: "p2", ModelName: "gpt-4", RequestCount: 8},
}
result := svc.Aggregate(stats, "model")
assert.True(t, len(result) >= 1)
totalCount := 0
for _, r := range result {
totalCount += r["request_count"].(int)
}
assert.Equal(t, 23, totalCount)
}
func TestStatsService_Aggregate_Default(t *testing.T) {
statsRepo := repository.NewStatsRepository(nil)
svc := NewStatsService(statsRepo)
stats := []domain.UsageStats{
{ProviderID: "p1", RequestCount: 10},
{ProviderID: "p2", RequestCount: 5},
}
result := svc.Aggregate(stats, "")
assert.Len(t, result, 2)
totalCount := 0
for _, r := range result {
totalCount += r["request_count"].(int)
}
assert.Equal(t, 15, totalCount)
}
func TestModelService_Update_NotFound(t *testing.T) {
	// Updating an unknown model ID must fail.
	db := setupServiceTestDB(t)
	svc := NewModelService(repository.NewModelRepository(db), repository.NewProviderRepository(db))
	assert.Error(t, svc.Update("nonexistent", map[string]interface{}{"model_name": "test"}))
}

File diff suppressed because it is too large Load Diff